From 15a52ee861e62135fea2ff66ac963dba5fc0a2ce Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 28 Jul 2025 06:40:38 +0000 Subject: [PATCH 01/20] adding info upto now --- README.md | 12 +- artifacts/raw/online_dashboards.jsonl | 17 +- notebooks/cache.db | Bin 0 -> 466944 bytes notebooks/compare_benchmarks.ipynb | 305 ++++++++++++++++++ scripts/benchmark_commits.py | 26 +- scripts/collect_commits.py | 3 +- scripts/detect_breakpoints.py | 36 ++- scripts/download_dataset.py | 64 +++- scripts/filter_commits.py | 62 +++- scripts/scrape_repositories.py | 2 + .../collation/collate_benchmark_results.py | 12 +- src/datasmith/detection/detect_breakpoints.py | 4 +- src/datasmith/docker/Dockerfile | 3 + src/datasmith/docker/entrypoint.sh | 28 +- src/datasmith/docker/orchestrator.py | 15 +- .../execution/collect_commits_offline.py | 105 ++++++ src/datasmith/execution/utils.py | 47 ++- src/datasmith/logging_config.py | 4 +- src/datasmith/scrape/code_coverage.py | 9 +- src/datasmith/scrape/scrape_dashboards.py | 30 +- src/datasmith/scrape/utils.py | 37 +-- 21 files changed, 715 insertions(+), 106 deletions(-) create mode 100644 notebooks/cache.db create mode 100644 notebooks/compare_benchmarks.ipynb create mode 100644 src/datasmith/execution/collect_commits_offline.py diff --git a/README.md b/README.md index 1afcafd..cbb7ca5 100644 --- a/README.md +++ b/README.md @@ -146,11 +146,12 @@ The scraper can be run using the following command: ```bash $ python scripts/scrape_repositories.py \ --outfile artifacts/raw/repos_discovered.csv \ + --min-stars 500 \ --filtered-outfile artifacts/raw/repos_valid.csv # Writes artifacts/raw/repos_discovered.csv and artifacts/raw/repos_valid.csv ``` -The `artifacts/raw/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads / pass other sanity checks. We found ~700 filtered repositories for this dataset. 
+The `artifacts/raw/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads / has atleast 500 stars / pass other sanity checks. We found ~700 filtered repositories for this dataset. ### 4. Collect relevant commits for all repositories @@ -158,7 +159,6 @@ The `artifacts/raw/repos_valid.csv` file contains a subset of the repositories t Given the list of repositories, we find the subset of commits that have already been closed and merged into the main branch (the top 5000 PRs, sorted by popularity). We use the `collect_commits.py` script to do this. The `filter_commits.py` script then filters out those commits that primarily modified the benchmarking files (e.g. `asv.conf.json`) or were not relevant to the benchmarks (e.g. documentation changes). The script also limits the number of repositories to a maximum of 350 to ensure we don't burden the GitHub API with too many requests. The scripts can be run as follows: ```bash -# 50 pages * 100 (PRs per page) = 5000 PRs max per repo. $ python scripts/collect_commits.py \ --dashboards artifacts/raw/repos_valid.csv \ --outfile artifacts/raw/commits_all.jsonl \ @@ -178,6 +178,8 @@ $ python scripts/filter_commits.py \ Once we've collected the relevant commits, we can benchmark their performance using `asv`. `asv` includes many quality-of-life features to ensure that benchmarks are robust to noise and that the results are reproducible. Our script benchmarks multiple commits in parallel. Proper benchmarking requires some system tuning. Refer to the [asv tuning guidelines](https://asv.readthedocs.io/en/latest/tuning.html) for more details. +The `dependency_recommendations.json` file is a dictionary that contains recommended dependencies for each package. The key is an input to `pandas.query` for the `filtered-commits` dataframe, and the value is a list of dependencies that should be installed before running the benchmarks. 
For example, certain commits in `scikit_learn_scikit_learn` repository require `numpy==1.22.0` to run properly. This is a stop-gap solution to ensure that the benchmarks run correctly. + ```bash # in a root shell: (sudo) $ export OPENBLAS_NUM_THREADS=1 @@ -187,8 +189,9 @@ Once we've collected the relevant commits, we can benchmark their performance us # in userspace: $ python scripts/benchmark_commits.py \ --filtered-commits artifacts/raw/commits_filtered.jsonl \ - --max-concurrency 15 \ - --num-cores 4 \ + --dep-recs artifacts/raw/dependency_recommendations.json \ + --max-concurrency 30 \ + --num-cores 2 \ --asv-args "--interleave-rounds --append-samples -a rounds=2 -a repeat=2" \ --output-dir artifacts/benchmark_results/ ``` @@ -295,3 +298,4 @@ flowchart TD - [ ] FormulaCode: `asv` supports profiling the benchmarking function. We should collect such profiling data for all commits in the dataset. - [ ] FormulaCode: In `search_commits` replace the endpoint with `"/search/issues?q=type:pr+is:merged+repo:{repo_name}&per_page={per_page}&page={page}&advanced_search=true` endpoint to use each query more efficiently. - [ ] FormulaCode: Make an object oriented API for the dataset. Do not rely on a folder structure. +- [ ] FormualCode Docker: need to get relative path from asv.conf.json instead of assuming root directory is the base directory. 
diff --git a/artifacts/raw/online_dashboards.jsonl b/artifacts/raw/online_dashboards.jsonl index fcb6439..ab06de9 100644 --- a/artifacts/raw/online_dashboards.jsonl +++ b/artifacts/raw/online_dashboards.jsonl @@ -1,5 +1,12 @@ -{"url": "https://pv.github.io/scipy-bench/", "output_dir": "raw_datasets/downloads/scipy"} -{"url": "https://pandas-dev.github.io/asv-runner/", "output_dir": "raw_datasets/downloads/pandas"} -{"url": "https://scikit-learn.org/scikit-learn-benchmarks/", "output_dir": "raw_datasets/downloads/sklearn"} -{"url": "https://spacetelescope.github.io/bench/astropy-benchmarks/", "output_dir": "raw_datasets/downloads/astropy"} -{"url": "https://pv.github.io/numpy-bench/", "output_dir": "raw_datasets/downloads/numpy"} +{"url": "https://asv-runner.github.io/asv-collection/pandas/", "output_dir": "artifacts/processed/downloads/pandas"} +{"url": "https://asv-runner.github.io/asv-collection/xarray/", "output_dir": "artifacts/processed/downloads/xarray"} +{"url": "https://asv-runner.github.io/asv-collection/distributed/", "output_dir": "artifacts/processed/downloads/distributed"} +{"url": "https://asv-runner.github.io/asv-collection/pymc3/", "output_dir": "artifacts/processed/downloads/pymc3"} +{"url": "https://asv-runner.github.io/asv-collection/scikit-image/", "output_dir": "artifacts/processed/downloads/scikit-image"} +{"url": "https://asv-runner.github.io/asv-collection/joblib/", "output_dir": "artifacts/processed/downloads/joblib"} +{"url": "https://asv-runner.github.io/asv-collection/arrow/", "output_dir": "artifacts/processed/downloads/arrow"} +{"url": "https://pv.github.io/scipy-bench/", "output_dir": "artifacts/processed/downloads/scipy"} +{"url": "https://scikit-learn.org/scikit-learn-benchmarks/", "output_dir": "artifacts/processed/downloads/sklearn"} +{"url": "https://spacetelescope.github.io/bench/astropy-benchmarks/", "output_dir": "artifacts/processed/downloads/astropy"} +{"url": "https://pv.github.io/numpy-bench/", "output_dir": 
"artifacts/processed/downloads/numpy"} +{"url": "https://asv-runner.github.io/asv-collection/dask/", "output_dir": "artifacts/processed/downloads/dask"} diff --git a/notebooks/cache.db b/notebooks/cache.db new file mode 100644 index 0000000000000000000000000000000000000000..c414d565036ece249ce0428968cf9af2b73c3dc5 GIT binary patch literal 466944 zcmeFa39zNHGNzDgR6X0)|N+l*%Jz2Cl!kkBkGX}YIpMzaVcBOiUs``vq{&pqep zEZx&Hr7{q+sC+S1t_FcqE|UrbZ~{0GhjJN+Qy~z?fhuByNr+wLkfIEsNEMZ$z>ZCR z&-=Zn`<%Y_eCO@%*Dj;#5klQF)91PW^KQ@bfBw%apL@PPQ^%UoWWSgl3$MHHy6dmM z?o-E(U3cAeH}T&I|6TlbJ-?8@xA;qbzv-`=uG{|Z$L_xOPp`Y--rKLc_x5}L{-!^0 z?Vn%!2CjVr*S>*k-@vtR;MzBE?HjoE4P5&Mu6+a7zJY7sz_oAS-|IK<9b4Dm`rzY_ zU;mx=&x&%OcKfrvdD+`nv!X6$#U;PJ;hC3@KlSSIW3N8-^z+A$UGfi)J=)BN)vP}n z_J+m2I`-=EZ+-QNVzN8mSHoGa9E{3iPe1?S(@#v)bUrYD<5>H*UV8btQ%}A8g<~g= zf8ml(dE(g0Cp`ApCvLdq!OuN@{dN6et=^b^Wk3|)E9SG2{(bL~AF1bG@;7eeySz=l z&zt%0_4oc8`Q@bjANl3Azpi})*S>*k-@t#o-@tz|z2TPlJIe3c>+P!9rPtlJ-}D1t zHb45oJC6Dn&Oi3m^9Q%7VLj%)oSi?2b|z{(n(j=i{^|Z~YoLnBaP>FbpZq(`bZ1W$ z^>k;y=nv0NzvuimrYe&N{p$Hg4{qe&Ie&2PrT^x^t;12Rdj0zR!K0_1J(aw+|M~qF zpZ(mMr(S>V+>65#gD(zGohd%^`MocG_SBj3TRuH5cR%ro^9MK1Cxi0`C-!Eu@$|`^ zonqYIZm;p}YP5evKk>mWqw;lC&Ccteb-GvZ^E7^zhQ8mFXz3B^J*S>Me)h$|i9z+5{LSK9Up`X} zPe(62d+HqP%R1g9Zov72yOwv?`Geb=`CyE=6O5+|Ye&1XIloja895 zAMOS|=guh$GEwzq|6u0N@ngvj`RZGGXY+ygPv_;dn)Juwj+!4Zwf_NE{xqL9n(P+C z{+q?pr)}GR8h?7qccrh|7U7QlAJ%`0O{>nZ7Y&jp?f-cD$KKk}hp9TpSM&CXs#O0B zU-t4xUdHNO^5-wyk9WPe^evq~xJUL#?@V8v?V0;?JTC|R3M^CYwAcF_B>HYGs#DA1)Nsr%qKm2g`Xs!@YW9hzdc6xSKJdtIhqu+ z(c-RnAzt;~V%6@8HnJfB)i&7Z&Q|@{ON3GHX)HV7Mtr{$+N9|@Rg<*<>tEN z!|#&cUo3H4=_fBf<*l7X%J$&?HY;rYN&Ahwc!jHH(!xhr;pf-oLbE-tG^@M#l%2O8 z9~ZMdvYWD$<6@%PAN++Y{K3tK?7USLQ}xyD?d`XupwGwOv9Bf<&h?e%w%gC(XDudF zU(T4H^-6Q^Z!JEh?atCwy@nt4?wTs!U<41L{VuNb7P{~;pl=tn{k$u^h1<_q+Htp! 
zDRE!r6|6sE=`-)y?+?^;HX1JO;49@vT>PA+PrrXSn)RE0wOrx+%U8J7RzB*jZ=Uvt zm3pEW44z;Y&-zuLeNA0XwjQ-AwfLT{^p0vj11N9}_{_T|ipt-G2k=Tib@3^0?dbaS zy|o$-M(399rz^ewmp+IupqeNc4t1}9i}Q}a^Sv!E+w#*_z5K~2d@{_w$iFimL&UuJ z?*#Fdmu~stt6uaZ{}-hEJM6dV-l2cRf7MT(^n)iOpMT|6xw|#vK^P9OYWV*UJvF&u zxq!M-YL$PLKRTW6wSV*pe*dY9fAGl-fAIEdz;U>?4B7KyO;Ba zgbs{P*2g*%OMw4TYysXajr>cJB*z@Ky*capa4bAPki$BAMw14J2<@E~%{Y%R0 z#{P6?)#?og_nmsSN?!fU;Po$lOZ?^+Uw!Qp=jRkkh4j*sZ-DzkzKs5~H&grLLCXqy z_gPgMccdp3)lt**Ch9BmK133}gIi{!ale|L{}SY-yM@cVRSb*4xoMv=>Ar?rvES8g6bY`lnc*JD0xs^QZXE zZ(D4lc1zW&DKz_8mulICno)OfpWy1X=VdlJt%m1cIJlpEFdOa5UK%TIPWFggaLZg2 zS;)ORuYwGERT&mr@eO`ToJQr?_CG$Tmh+!uTrlJBZ@^{ zqZTro#!@7TfRGLrTlIk;>d68wj&As_91 z#I!oW}(bOb2e}8vVj;^KHWaTgvl$ z*B?BZrcIfpnNnG!(meB3+$4z?syYaZq%5k&FQcIHs`Gmfocg|-_ijAsFW!_(F1Jm= zFE3xqTiCHSIhXT&+rd5ZAQrPWvgI1syW`*%*r`bro!`6b;3gqMXedn-E3XOis%na$ z2$d>=FiYa1OoF<}(kcjQKg&Z8C@24x6C~Ca4U$_034Vu$;6)yPX$X?U&=BG@Ns?TZ zjn7Y9W_73%<%d8ht01X7FDla@hdO)yDu4VoZ)R=H{P=G-KmN|AUwh%%=U({C_PPDR z+SQ1YB*?uSK%oj7wo#F=HeM7aWm3gi>L+C#MUgKX>t}AbN<5knvtw=2;Cbf)&vUQ5 z^4jseYT&m41@NX&Od3e>2T}RW!LuxFck}diOb5FKd$q z;Rn_rBx}(0cRSH}s#JAWWwD?5We|s~Qyu3`7!`4yWqFx;S<`UF1l{Lubb`*>q(S$w zHR$S5)$8x$2Axq7fpx-+vf8V{T4lKpdq0Si%1;7SsKN`Ys3`I%@clB4^`U#k&7re4 zxdCQFEI236BR(vVekaV>FM_JB^C%D4tc6^RRnp`f#X?niSme2?n>^Bw*4=Ja)7qqg zd2F$oFTM8s^Szgkf9|#8ue^HUirL<;*It|r`!eUgZ_27pW432qa@FK%kR?r6S4mcs zp3kEuTlEKTa7Zk+G*h2cCS}jRDR0k8AU-+=T)H6I_DgERb8=iY*rO` zrC){u>94xkmewW>qz^9EOJ`KnWUC?Jw58sl2SP`qo%=Bfo2sdJ3U~q1C{rB1&=12p z%GjG}Q&(jj`&rb0@8s8=teCY$1MlHW;q8y=`9O7|YKkT(SfW7Xb(v=kFG8ILDoOJg z&m@Rr->=gu)-S@pbhHs|O&VDDE?~8c_|`W(%CX7<#R`Q{RV8uh2TjG6tMW7tiXx~Z zFOD0Pgh_)tc6AsVP^GcZ1M2@EPlqy~Gl{%jx@;;wpQdwg!~g^?EtFBsQ^mzR;s zqonZS0PnYs@wd__4y%ezuS8$^-|psgS(`Kf?_U6v_@p7Pv3{khAS|o63F@?t{3;Df z6(y;PD!(l2D)#dtZbC0?K;vSitgT(TQWt>i6qEg}`H=p>8PC?V^F*nXO;&TKr+E~6 zsh1LXg>{mpsp5(+OI0OF2}ZdGf7wm2Wo^;`eC*Il0g&W)QO&jp+Q}|eR2Mr@Cb`F5 zsG7RS!`Sy?LQ9se&YQ^hf-H>7D6WdQ&?tY*&FgG!(xANHD_>kjQk~W5t!j@_ucFJY 
z6QSp+q{#dxt7?x^7dN>6gfz9PqdKlaJn1m@%0}+{Q!_U=qqRwc@Pe;=DZ(j<(i!pp zt(^#K{0+8hp?np2egz|Ifzx3@($=n?eE=rm3NE@OKnXWVDCKw*t{;bs(vT3 zB2!sVMp2dp5_snc`3{wl+wqGy^GM_5c@dU@M)vd_PLNrfG{_!ZgG@hnTrc~?INf(k zNwTVmBR?-mFL2ZnI+x2hRz+5*G7Y^b_tT;b^~w9|Zr(a;lLq7k&;R1dQ~RxdqK2I- zNGEm>g}KVhFc7!DAc`x>IwKjx->k~0@%*R~C*^@t_uuVg1+7gQa34Lqg6-m!READ( z=`v6!#*|3k_P!LqOE@ZR4xQWV4=S}{>El%KBn>6s=cNpG$*fx81Ld(hI zd3BU$VO}Q`6qc(QmxT1{N49BSw3r%2IJ#LET3j@^k$Qyl1g@4 zm+xH5II4NV@t~_nP{pCgc8$XXu;iM^i6uTLQ;qXKcM~RAn>09&a6ZrODd;?%z_?8E zL=`2upwj2?72GJbM?l zoFq_|q+Ug_Ee)%h7d!I8lG+=kHtHSkak3WHCJl%Ox5pE;C2UKw3yUI0(NS}&y?~Np z5kxUgKfXR!M1}VgG=*ZVLjIATQis#e*QedYrPd}5rkl5?V}vhcYaNxR^COZxbwe5} zF5*ZkGJZ<=iPA+vaayNL;*ip`o9vIZHS@#Yt$+BPYQLU#CgkEMjmXG(UM%^gj7T%2 zE|`+`4wM&@HT9C9EcDiT$c+n_+nQ{V5dYtHaMv@>z53Lv&%OATe4B8@ z0}iAAA>&R;o$3#RK9%dC++V8sWy-xO%wMhfx&R32<^V-YLz50c& zb3xfVR5{5o6=VfDusEv9AfuenG^CA4?{S6Igg>&UKJI2YtW6pm_Y~7Jy%PTA-hLr+ zNbBB1jxP}nyGU4CjY5|%_M0HA8{8I(24tN}h#4g>x{GRvwKap{p;Z(+7QSNGk@QH!Vo(I-(*MMI`*2 zlr%ZyiiV^C?!V;EL7K<#?_?|bPFBp?q5=4!^#H2@^k|ah9d%(|)WVO6F2ju5kYJO- zeNZJhdu2(EJde3f{Gf)3^1!KmH>D11lLp`iI{*^)jD}tK@JZ_e76LJ7gE(cK@L2@ylE%Ujf{rw-biworoY|CIPuf7~&9k_Ii-=#?gh?KdNU6Od zOp8VnpqksAz_d2Gc_F1~W}n`gE`(J`xmeiafRuLk5bwSQW zw(D;^SPteNK

rfF5cX@-=ZH57RQ{8W;tHD&X*zWS(>=fT>f z!E?vOAZcyVA&Pm>Qe3Z~AtT2<0ezH+JuOo9UFt`Qs3;3!0tXjK2Wx8v!2=f&*d!bR z>QT5LdF3ZwQbW1=m@Pbgro|wRxs1UIY@(HLW2~Y7{9mm_u-e$k-|&uxYSA=Z0))ZPI}KxGh+OYt-(D zsHYh8M#I6mu0@s_F73n-bbKBSl1M)7-LbU6Bt+{;Nh?X6&|N2s{7E-7VQZ5HcWjG$ zufMxHEl1?#yAW3}ZfJEbE0PdI>lq`Cs0C36Zs@@F{el3&%S^QQgKj!otxX!lkuBoc z9-2AzhI5>AL(%30ftbihHz<(xtx}vgdhS^9EFk{BBO^81Qb8Vk+{szqY}S}Gpg(j7 zbbmTR-9$zxA}JzGpdHU=O5W4wgeG;xUocM#A38eRiYBLt1|a~tnIUs(vh*7dI00yF z(g1v7BS6$m=&7kXfzo%U64-uzP6Q|%1yTugMum8K6+9)f-)Rzrz78th_OKJ6)+PG@Ax$$1DO&VZ_Z{X8v zGNd(cTopYwE1oQ(JpFyuJ2UMKMUH2T<`a{Ox^Nd#IZMk>(Z$-sJNjjK!P|R z+g3oDCl~H(Z?}A>lU;0W(tv*G5ai^ z9S$h>@nQSAHA6-cUy{{^2feC)?>1MdBY?vJHcqHAx4y7pH@WIH`v7Em^Im=kr;LPJ18*S!+gT-RIvTQ_&3wMm2R zgNLyVMzda1OlPO}71@q1e6)-rz2w1yBP&(Xdy-SEkD8iHKt*n`&behgU+F5u^Y=Sh zJ8P2$--nOJx7Sw_Eiy=C)rByl*y)ifElFfclMt{5Nju zEY>Cs!bgurh&(BMm$)p1j%?R1kV0urDO|%|rd&)-0;ma#Tz;2n-5 z(4C$2hWnikOUkp((@seugZK#pn((F8l6Fa&NgFHfex;;OKQD zT<8so?)qlNy-`7efeQn3GEXVSbI%YishpPeoSZXS3M3+BSANR&@+AJhbKSh_wuoVE z)*w8bf7DBd=rKB`5-=70$yp>+yAEJnrX(lGg*Rm8yd?E{G988Pys) zBw(TOM-&sKA376X?}1YXZiu7SCJpG19=&_faIL}oCeq8(GmuKw6H4gjA*Yu_BwGoJ z=t{z?sWP8ag!~$cJoH@fpSCCZfBCeN6KZYJU|e(cYyj5eA-5)hCOwM6V-v+4?jjnm zQj)Bk(5R%Tz0m}{Uv-mAwKi#h9WKD>9ecKzbS@d`Gs+rdmnaEP%qDk8TL3*-1WQN( zil~I5YtGl#&Y!qBi`FI$vcvIGyCWC!(;_0>wJQ_2qcvb&~NiFXFuR%S6Z7i5FbBu@+Q?8zlS~zE&8OJh9sbNMa8)bC*^K(OthdCC<8X7 z2ZN2ej*w&#e68vP%>@=X*M8em?{k9F+N8nxu0uGd#croFnj;TY!m30Ol{=i1Mo>}E zR{-_UC!Q$jl7_obXlL!~j`G{KCJm_VBS1->4E1vca?fX?$~l0kq*Wa|ZckbY8nWAT zuRva)sfDIt*cX+T(PdDxV~P1uQuTF<)U9sXa;!}nsE7U1>258`Wn}S1KCf$g(nJi^ zFh&R~ZgRvpL!Yn^Hx9YEjE(@*RcX+JIw}v~Z#kj>+nO}+4nGc49Ghvs>8`ZFU#fV* zD`+QWn8X=%efqxX*z;H6Xy)#Y@{wjF@~=FQb!cueyf{{#nz+&c=%!wg}$!sNGT~OE7~J? 
z-+1^Sn^0aM2rPN-Xi&mkN$GEIK|Sh5WWm~`0rkE^7t3r)4sBx&G=qy#rPKmY#LjD6 zYWl2U*>Uc0!_uaxskcCe(r@~GZrID#CJnS@Z$YB!D3TC>L^aVGJWowfL*`4LQKd5`N(P%JT%eRShE&|cnpYdCY4q1%%*2sI zb0<$Zxg4x58a($e99(o7OMV*;qEFwrL zk}SmKNXum~pQ<+9@ZY%6=dw0w(0=qV+FF(UV%Q6VlBPdV)7hTAvA)llM#g~FFlf1G zEJKMv8$(n^iE(IcM3;;t1jUkCSJeA%VjF9d1|zEN7jBCky)}1MFq%m(dL&aPR-WM| zNs(JabDt7xh!j6pJ>hAB>?pAv3MX(V+BoidH|uF_a{YSls>P^13SU!VfzBrz?dX6< zErVMfM1_ywNkp48uNz&RlxKZmu`$#_7u5INWZ$h#c4KUtI7pzh+OZu$067!N$(%sQ zlurWIrPmw#7?+z|EeuHfb=v=kT8G4;l6{7_^tp(HD+XwrG^D z%UIC_fC6WLN`FP&+^5talqDQyJTd)re}^0Q-`b=B_UKw*bm7eAOfDKBg5CwPCc}kJ z7wIlhRG<`8OKWlNlh{MSJ`dB}D@(M8Wc6w{{)x58^&q9Q@r^CkZ)=gp?Nqml;u`%? zfkmw^g;mLp%tDGPM3cyn#?W38*v*<*_~UK}&DJIj$VU#ZWDAlZ3nXs4;pL??7}F{q zA`VD10pdylw81=PZ7D@VXA#<;W)R%>wyX35+9W*XFFTV4-n-Yp>%;|BE@rR{jl^_= z!N3ujkC>b+&pw5PP*L5hnWLixeqM4@_OdodC-Cs;IWm znQ1|07&@EJpe+wip9BF6_)@qwR34$YrPRPPos;`bH_)w38t9+exbUmc?RULQhp3VW z^R`)=G|->!*z89^zns-a;u$t1 zZR0SR60)BaGXz7q@@R$+LVO-{K%wGkF2&EBaB|kIO2@wr~D&RDNG-G`K= zCC!2GPl9fYp6b91Ncxt^-Zi35o+WM9=&!js>((YM&iX3o%UW&7K`7bcz%Wf{i?L$bMur(jdzmBLf28P_}@h$r#oq%{TG6tNSJ{=RnBtD7gMY60dRb zx%QaUS(0f+I31B9#si{dM@C=1jhlC!90+TZ=G*w}Wqlj-lDX{jUJ!VZ^_E~CByb~{ zh1>AtRW;s#aIPDfC5VKi&N)Gp)?b1nXV==K`4XPL;xFNHb^~qi0h%|Wn?_j>>3I4C z8305|I1<4|n0HZ$I+c#|9}AuA25Xb%i#RgHvG$fY+aFq{IDFJpGLZr2^@l0y5%iLT zOsObJQXDThRyFlfU8BD@a01=hq=Ejv!+StycF4(@a@En9W0@z$d^qG#i>#z-h138& z%rt+}_eonmJTMk&;%8^lfIYSbY&-j?3!GwX6}NOo{29O~p)Zt; zTGPYi$3c;Tk*p*eSij<5bJIp=ZPI`{qUdO@4pHMm%L4wcn*xrtNrUl-qT>oi>!KrdRZoVYR*co4G)LS~Gh&m{6%%zJ z>@hA+dY0SQciW9uW^K}7{ovuvI)`vl)cB(!>v*PXzedbh#?3~5S+o-97ooPJr?6!5 ztl)ByJVr!LQ`R1KL;JNhY4E-GFg_lQn#r3|3?Tl$XKhv-Wr#>9G|*;ZsCkTzmRThb zx#$w19#D}gqoUjvI{uQIf`GM2gKm2nT|2pvI`L?@)usctmObb$vR4!qX%vV=4eC%R zokOZ*`kK(x3ks2>PkiCaw*2jHb=1;iYtle{+X~eFq%u16M@`Qlk%i(DYz<`pm|g|Z zm(EYt3Mo|v1Q7?J_6NfYKgS76#nzfZ^8Uj}%#FiK-*wzja7Rd=lqEkpqv)A(0EZGa zZTOTvb0}#F@zC%HbiVq*k6b0Tw(@3e(qMc12yA*^3eR_6NZQ;hGRLJ0XPr|ZEgAWT zdMY|J4Ap?eKyyJ6hzvi?J|Po~8FMaA#1FY~Xsk^doF6#?rwpjKjn`qLgbf($GRboi 
zaSDYzT0=wL3T{pOID+{A5kP^s?Fgc+Ndxg?Yam`uz)?W-k5CYgsG)@@^+RxoLY_o| zaG#n3DM%XWnX(`-fB%P_>{x4)24=bj<`pL(0!2+Fq)D4zUrD->aEGyyFv*hmgPF~! zT_DBRYE{17je}!t(qR6$E#}KvYi?FwGRbgEsgmQPiG14+EV$rP1Xl;Wb`o8W5uS|P>f&_5&!>_v9SMqZ2U8mu z21$;KL?5b9BmpASp&EKA<;H!)O~_$w(m?$1@+qWi3u%Fw>J@XEfkZ{9*r2cutr8wP z{UimVK@DyI)61yfpa6xVfCN^GO1Fa1F9`qm{Z7uGwMm2U;Z=k!hZAjqP8XcQqz~%` z`Scv_Cgo?en{jrSe1P^7>QH2@=|t3tjmNf}z=B3;XVSo0alsjnxHl(CUo*h2poNL< zH>RsXM#ig&gkp;VEvrRy8r_spj_(uU!?|$5t*sd({>lPrB*q2j*{4}~KyR835_|jx z(rw3UkX5V~&8U!w(J>VX#guwuhPnqT0w_5%U4$-~LQ`n|k(=fpYm)};3fr$;H-mOM znonq&?0}~#<_sd>K@J5$f1Wu~=LHT91qC?8aB*eu8dGNFw0*yu`^4I$f%l=ci>Z@~ z#bkdiK;r*L0e%8@a+M4Xx zsVzz~Rkni}9*{sp*pg@p&Bs`%i=_M|Aw{Uj7UpS3B%a`3;C{v%S3yCJk1mkrg^r5?bX~-kQfOlInvk_VkWdNN)J?)Cci^-%ow^fY$mql7l38okWkjvl^ZbYUWvb9Nr^8-g5b5qyctOFWZ5tEdGdvGyoMio%_Qw<_qC^eEY^KeH5 zo$m&nwMm2SD9=NqTe!%Z*RW#RJJriNgOes|pa}XiXB^Sql97C5%A}qmQVxRhHaGs2 zwMm0=H7IQ(0#gVphQ-FlCedJI!dDtAD)Fc|kfuk=mIfy{<+M_URgt5omgv;{_c*$M zZA}`4D{X1!G@emJIi153>2sxjS>X;ddt8yUCa;Nb8X{C^!--G@ouage>PzJb|?gZB2`4BJTOP7N2*?yP5W2HEZ%fPbN%v>J@RmxFZ8XHHKVfG@>)2EJ5sm zDFrzDs8&!@kb$Lgn|$O;P8>gLlLqZdrlh?e*Jeth6i&LwwMf+xsW%U;#RP#jL5MDtAlbM-W<=Opk&96-!yiJKa3s)+P6MT;I)~NLBEp0q+PjX#P>p?fl!#g zX+d^5WX3aMi?n{gL!!$MRgxS9e!=IvNsU;WH279{Wea@$@+`yQWd=r@EK*?C@Fqn&E5= zaipdSIlz~Z0c3o5<(c?MZ!SX1zMoW?zDWO>n-#P+Y0%wB@onuRM?IJw=1~-;?T$Oq zmr^k;*1QjujBaL78~SFxBSwz3HS?q2#gD#O$cK(0B2Xp<(NjaRDMkD-LWVsOC1d+PZ1J~UB^Ge`A%leTBtlP0y%6GKgq!|z3@ z%wdBpOdlX9VmQRy2PglXlLyP%;xdq;gf+B1naApFL0C?5t={}qLM2k zi6NFynulZ4&S?0bKkEdEwMm2IR#_5qn`@6o0*ecE14Z#K<0T~yrX^2Q8*5GkI#|-T z2eW{;)rJ3OZO#1n?>9d_vOCP^kpWw^tjP9EA>qjH)-Fv!by=xZG=T09K&X?|YO?lr@Cn;_TyR-wV}w!7%w-1Zf(%8_Jj&6^ zp=pPyu5NAzYir9}twpe7&5F+<1SlrXaMy8eQk3u@T0?E%n#+n%v_KIYLF`*zpFel9 zFxD0gtOsRbgr~{`wsC*fApJA#e209PsU-Mvk&Kg2FM;ZYqLDN?$n;VqR2UWyZ(J_B z-z%J8vNmZj>B~+q%^6454wsv$V!sm>MI~NTa%2pvN*T&q zm6HtJ_|(=W4Xh6?mTDDN4>jRQe>U%4wwn0={+J0|v_T@xm=PgBq(%V*LGPS<9dC#x zqfn6{lpcnYwqpglM+(s0VrS7nyx%|^Ge?}krCt6IVE|LoLx_8*Ws*oDG#~{{N)v{g 
zOaaV^s*(JYTm0nz=42aMTQp#_hsri=!K}UZ0-|xk0gu9N7}2>Wqqh^E6NS(llr|}O z33Gvn)rC`JZOwpqd}T%CrT~sUQ!+h9O^8_c3sEEQS}SVRsA$rPS|Fr}uZLcrV#z21 zNhOb03!h22!g6O^_smt|Y#%6VlLqH|S8*=X&-&BuYolhg9Ytsgvox|Hi;PYtonEDk z;^GpdKug@pv=xnPCwGF(+N44Dp;cs4<{ELfdefP35qIe~?_4?v&IL{1LIFlNKBsVi zkTw1-Q*$bYJwv~s;g~wKEZvX4%LzbhlLp|OEx=*-#26?uu&v;u<75OJD8%{Vn9GS- zew1BtHTc_bj|Ip0>rQYmQrXU=!Ex^bha^h6a$J(0gq4=jT)>l5(nJYsjiCngQ|hsr zP%267iDd8tr*3yM*2mhU!D8O3ArtJ%`D`@lF(92+N<3MFFE_l!j2c9vhj^Ab8*)L= zU{@gukLV@R)r9j1#fPMX^q%{X+uW>`tx1FL3A0w~@Yy}eMTU5~m(p|x7rTV$1dk(R z*gY9$(T0p7@<#;yvXCd;blq87G&prgUrncLUoS2<5-KEHq07^$1w)A{RgAPhBf}`` zlRjb&8W`NXUe?ykkAI(Drs;HNn=0^WrN(Pxc!q-1qDIOpkPSo>5b}J0YN$lNbyhK~ zHiI6Ti(bIV>zr&kYl{Yp_S7jR7Cmn6;Zz>p?yH;HC_k|bkP`36ql=T;A;U1K)1$Fk z@k?0ACeFS2W+#}eO)jsGU_w;28w`qV$Wjts(8+*`v0tHlOYXa*m=O?6p=}l+{^aR? z@gH?mx3D#7!06QK0?gV2Qy^4BZIIjvwU^4zq5|)g{zhbb(3gj@O2A85jWh4U-Ltl4 zFnm-F%mTxXGz*EZFq-YDNpJDDJN<=B(yOS~(*{?EP*V}uBj-njo|Yk*tV6qWiY%K} zXn4qt_=L4dgHb1q7GQ|wg#tgC40N(@?fsx9$m8wM#t7Gz@)FuoVM-8BhbARLvYO;# zqgFTcAG%2zS(`Mt9$8ttc0hB_G@a0H(z$Fjosy2F?}^hy&LvA2)g=>>$ovTtjrWKl z5!9!|rTCnqkz}?e4ZQcPz?+TG32QHoPGAgDrMDl-V2G24J^|Iu5QmSkmQ6^zEra#w zTcC_4z<$upx>=hv!0yq&)EPCPm3^}QsZDbj`ixJ_hzW^%afYC~l3rFp!o!P?MABK5 zGmgGFi2vVKZWF(;;}JmKL2B)Ef3_}smys#c9czmQ$)l@Cc8dCp#D;axh=RA1@wy^K z)(Qi}sRT zUgUnUyD4Kq-n>A3mC@4~B^$V`UQ8JZPCH~lO4d9CVO>>$z8ZhR%`6sclWVX+25ddt zP1pzm0|rhvB+i*u2{FZkPe3k>sp9BPLEq=(L0Q)+=uf)IbXuD<*dDrMb5`f*!W(v8 z9u#k@5~W=Ffna|~YY@X4(V8nEUFCcL8Us{>C5;vC_bvoueT$Q2vo>iUe)v*|rp1saXF`RpYN4)O zLqDROl{i1;PEMGKnlt$#$TAiX)TC?*)E-l#xBnw<=w;R>4Z`lhJSnv+mQ zijWzkhC;W3umXb$?Up)Em@C|N+CAu|s9Jy5| zz;hoK9Ha~gaBYLd-*6MMSerDkbjrOczS8Mt;(Ul;k3nl5%410*%!v z>^}NPT0QFjNjX^_Ym)|tz8aW3CD$Akkz!&vKWkqoRnbV1XWZCiYpR?kL?p!M#N?(3 zYHCg*xk<>Zxg$USs1r=qCJiQ?^V}Vbn4vW}*SSCq{Vfa$MrcYgFF2zD&gH=HRmKt+zD`g)cg_hMYm)|!4lT^;l!`h}-M={Ny<_7+=AmykKzEfs zBasPW+tIifk*Xj`$Ow4mAdn-}rR_iYO()Q-O&)QY%Hi1JG*M54eno1Qi6t=1BKj0b z_>c^QARN;M&EQ8?OKDKQ;kf~2Yx1b=Hyn3vKU^S6qdryLI`Og;jTus$sB{i=GZuk%h!h;YKFuaGrHaw{S4g4kGvE73qFtI~#c 
zA`Tfm7`erNz>UPZwMm0XhqMb!d&N)pqmZFuJNp7s2{`J-i zPIjNQNrOo9uNH_#gE^y&Z600xRbD7Io5yT7DyJnv5sW!!c5zL)7+M*0Xwfp3wfY-w zy7jD09(Aq^vd+~i$njOA8^|*A;!>lh-w=8VxmjEz$_LCyjgZ>ZSKsfuX>qnTX)rB& zN7b8si$jGh7y&s0IU4`Qxpes%V4Z6v zs;n78o=m=kx`;Y2iE?S0M==|6l4zh&6V5#S_P+E%Cu?PG@`&?P_jmjBVeixC+Trv? z0VOEh2gy>>35K(mHaJXBEFh8~Jqy#ZhRLKus2_II4rXoAfYMDYiw$^gT8+8^dB~Qf z4bFsUIl+Cv9H~gSS7;n?u9_+%AM$wH<3i7d}jcO+Gwwe{6ck@UcHc9;djx}bFI9JX5_5N%+?_4pCOiBot zayuY&Q2Pj`cx1&=bgwyJY((_gC7UEy`nP@B$+@yNX&~uZz1e~b`PF)h`(vx>zTZRY z8SvA|f*}*goS=r;Ib8S%VDf@hB+e;y6NE^c{E6U8j#6XRMh&`U?{;@I+8rp1%@`JP zyK}`e7m1efm?>EiZ7&cjAjZ_N=}7(s%m9{Ed)1BRv9(EqOZOZsR&CO+8M!WEVZCZX zQnPmk{yNt_+RmN3vuPL=fSNuB4b}zY5a8o;;-se;Sx+(-F^NDbA9{EGT_?NK+N8m$ zNzzB)gpGE(8H~<$;$yB11w{Z+lfs=6*=)Li5xx`Id3w$0-9%kT>)@UE)9-V%L#>S( zgqjd!5bn))g(5=;qENpfRX7}#XgpIA?t16o;x?3~LQWewU4+(2h9YHzAUV7)Qlz!V z*(6OZ%lMDn^dDH8G$1vx#DMG%VFIWn5Yh~C zx;mAkUHtEAoNP{OlLnQpT&>BW$5- zsUu{ezme`k#uNE)1aqhr43ZG}cb}91l6wWpyO|`c1=T-yqpxRe(x6)Q@CV5CA}gQ< zz1>MM-s^;h>=$I|9NnEfqgFzcNrAyfgqd5Tz|)}6Ks~6PX7{h$u->gr8feQY8+m?v zPrdZnmpVZ)L)OPdq^S%fu#5{vnlN=q5{5|4vQRQx7JWfkuV>v*maI)0P|I1v3!q?F zywSZAeNx4zHhIKO-0zF$%iu^0 z_Z|s4W?|C=O;aY~dW^t88V3Eqh><1)V+fh?P|?;di#BpIGQ!&A5j*h$w9cJ~x1E1+I-!@96GX+SM!+x7>&{bIar#EH7-?g)3+ zT!jn{q*a&)3t?O&*cb`M%m|b`LT(e9^*s&jw3N@ z6(jMJR)mv~u`)#Ip~#4Sv&TzJnKghrjn^w~;EA7p6&Dg$C| z)F551Sq?{Lf=QPTqG?)4r4xuSIhRZhQroDRP?N3cSDkXX9KpsZIxfx0`F<+N8m?e2KKU zI`G!-|wdBT7N=40V{i?`8z4wMheQd#T34 z#j!t!7}*<&MiBbU{b~j?r3XWGHlK8tDcFsqHYybAR7R_0Q`aQG%S=S6i9J(#HW&~L zUrNr~@BWOFRkSu~pnmKqsFw*dW%vqGX_PDyKZ_Y~$@F#RTnb&B=MQxkbjEnrbyD@G ze!>Y%Ym)}%W6LmM_O}7sv{wW#Y+zqQ{DJ$fyB?3|y6e`Z6eG&`BXX*PxM}P1k&2*QRj2G+tN%;p0(I&Mnj8Y0Ao4n`Q=Pf zaib8NBcVvom4#1q)AHpNonY27@gd#f9{6UV{OtvTCQ(=WsYWr5QDb{MLD$yWRZ%BD}vaZ zz5#?0;LtMXh!H5X154L^RYBkkLamK*>er7sxl*l78hp#yhgE!?r_QI8jeZR+U|s+k z;DAgLl?Xv(sEv5ijZx(K)ZO^69GyCAqXye@x@c1FTjaoa%_(3Yb5B@Kc^i#%CUjMZ z=P`bq$RSN&W1)brnIqrxO->ff+N1%c)h*2Hu-yON-YLV;#>7#jd|LNnx^Eav<}qyo zaWC#r(T*Xnq7cJEjxKL2`ExgJwzWxvarp&Y#>iXu&KBq9Iu2di$ca)LAoVQ6IT493 
zvOI?^LwhYuv=TZbs_HsQ`ZhNXsi2M{%@wzHz z0Tb*Bd58^*a525Y2?Z3$Qn9baJ^lwbla{Pa8eq#FrUBM@ThjAfGFUQh2urdEN_B?j zo`_R1#fP^(ra_<7uBVUP``ySySerDEf~BDLOn*8r;P=nwb$_&_=b>!PMxz1FrmW&d z!Vp^Rj67gO50qDr6PVDC79oxsGO7qV4F{5?rMM@-(Ag7?+%jvU2JrGdbCzl8Y)-W` zo6M&+wILZ#O(zX)?Q(7Mz~isMQ-%o!Z;z*4IP(ll(F^)Nx}j%Sn>6^AUFubQom&+e zCwg#9O2l`T8B?_Erv&Xu$vsnuQ%Z@MhER=c>_%G0+T_aVvsd45eEt*V86tj+MIiT( z@W6W>l@wA;G9bNVSXPng{O2FI87g3H(m-kn^TX-boUX1)7eh7>27-$LtAXAv-F}ku zl+!IuRXL4onkVJ{mvGh5R2OTL29DNWHA+>TebapR%*ZAI1|I`e$P6hRdCSmak+gyb z6-5*;D$PlIyBm=zYm)|x)`BxwwEEU$u5Uo{eUdQggy+d%A*4vD8&E@FPFqc}mA-b# zEQmTB>gdpzE4}mn+|ACjHfiA9BKQ95>pQ3k$sh$XFbGE<<#q_xxncvtlgHBs98(6+ z3XjOgx)52kwq|~ME%^4jNU%-%34tjtmKcmi~^5XWZvI*gyR@|DN+1`k7;X^>;4qm$(! z_>_C#zjCv=tW6qVn)$c{te%X<_2}%d3l`TG1b7Nmh>?)M$tiK?GN+sZKZmJM%A?-F z_7hlt{r0Qeb!`#f+GGoh`2WEJeJw0Oo9^}`OFlTqppj|+P1OYvrXVAyb1rZcFkvW! zGA%)r&?3kgTn(f>NT8J(;;%YdIa`xv<#dU030^z%RnrFPMCC6%OH?Ee^ej;-Gtl3& zGGyX1gP~BsK(PpJQ%ntq9G%ciNW;d&T`q13YikCKF4!CbvqN;XH793N>SL|>nBBNrh({nYPK`#_2+_JROu%s}%zf74OH#@3`kdFhU%<9$k*W@~!3 zj{p$)oQ}H$@&#kD5om&~8luQTiV-C$ybSVIsz5^=x1GUKI&OZ+P26m4a%D%($LD5h zf-uHRPS|L;p2%822V$l&lBK1{T{Z;?edGs`h$Og_TazA8FPA#v$$x#7yRyAS&`Pqi zXaKdhehY}HsEoG7p=kQM-MARhK?;hWToizPVHgLrOA_-(J~s#SX=eJtoRP%a$=nGp zYl{Zg?cz~Ls;%QWrh{KgXS3UAnt|fpaN9HMn+y~;JY5$IU1T(tqwB)fngO8mz8b&| zeOKQ2jjiSEZ`+d4l^bm+VIz@Cxw95+T$&04*mm58O!;9pA41j4$BIjx!1$G4ak|^p4Pu~;o2?;^Kk(?=*(t)%w@-t{Ihzt0@sULB}^szQ+@aTk0 zi>E}=y>6)^>o*{BU>Ov;is`#lh%&k*3wm)8r{lj!IZ=pbiXI6lHyhB}nn9y;F^ADG zG6p`(^sMN)^@9AVr;i7{>Ri#?yj7BvFW@ZIH9TEeEWG!EM~pGwK9wx0n_ zZ8yG#wMhdvyl`RcEHB;)-2CY%@w{F+5%00<@CkbYk^T(MF>Pr~cZ4VjnS`o!5GDkh z@R3EqK^G=|(G6Z}lLl{Of%nrWHT%aQQWnGAZe9?5*Hk6AH!H3dQo?j0LZ^hU4F8pX zz`QhEKd<#ne%%dVYm)}B4g;3=^X^_ zYrKCj=|#HFg-2;^%^-UE(j~lz$OwZCh~I{~I}ctEzea|UZoPJfecic(DT|V9WBfN` zfuuU0Cye!sD6}z-(BrJ~q);B#oVF9+{X>qjr`ATz7og*Vc1fG!3|;oaZT{cF8#^>B zn}$3jtJ#}$0T&@^9Kxqhc<&h^#DQ&S9AFxFD@OiHZc1v_CJmycCxp#8ob_ktmOG^m 
zL6XcU979GbGxM?}P-V>^HNitUCqD4oj+V*Vr~$R)${g}x=o=%ahPb)dYh5kpeKZAn;{SKeX5;CTJCdo9c{qD>`*{FuuwZT0EMzB2?C+PI zcM4Nx8Py!~9*Ex^3Xz(DYLx5YR^mG%bin~kV_hu#TW;zV)+P-?@8XqucpV90_ZaKE zHJz6nOvcl8RYvm=IRT#myG(YFJCp#HlPV(Spe3PP$Yc&fQTr`7V69CWu$r5&X47t$ z`{Tt$N&~P?w8*B>i5BWG&_|>TDHB;+`eh++)Mk@2q43P z9|q4tj$G1I%orxo0?G)GT4VQ}R>qpSyndy##)D8Q<+T`j&Ek+(p?Iz?=RpZhi7^r1f8Q~wX zFLX)BBIpTF0p-9E8?|G<{vS7Ed96(vV0!G=esNlDm&I)D6)3W!GDV3gU(nA8*)vX- zVzg!kyACcI%t#W>t)~2gZf5;in>0W)<#Qk5Q4!VZO?bIB>95x?Tp;W)Zjw{P=nxpY z$evNj#Ze~lO3#N7%6+}I!v(|_tgLZG&TW@#BKouU0bCjhN28h~1Hpl!&PFFRRRrA zG53_iqvd_W5ZVqrT!w4pA@3KX%?g4tze%1wCE1i11 z*&a3fz0robm9+gbK)47!;{S&bHO9S0bcHlC6S=4&pwtWj*Yg>+=W93qyWO?aPYO1=k&6(vVDC4plWF$0(Y1DT0f{E=>+WRWsgCyld(xmt}3#09)Me z`XYg?$>_!IHw(`~`nd>}DG5-60ilQ96hwZs!MI@PHzCQ|^8SA8%T9KswaFtFYjmbS z^K8>o>@nz2X7dPN4q6O~#wg2BxT44nA&~j7kT;RC)8)9YyD0%%n>4UAxAwvw)LMho z;q{W~*3r(}9}#NE6lINzL9U-22>&2ppa65skqZYT6f~5=G#BAF9&xg4)+P!w?ONQK@DiI~7HS#=uS`X!a?8>-YM7PB4|WCJm-G)HGWWNNV2%Dr@6s zo@I#YDVlDip>V2FR3YMkO^MVBQZoc$lw#1h2h@olI_BuIw>D~EX)@Dd6B>Dh9%59J zqHRCuS~X<0D#oXh>!q=R>@R|sXtct=AS=LJHfG|;T-|R2(RR3?-%`4WcwdfVdGGd`IC@BTNFv4}zp9^|^@6tgRU=$?`T_ zVA&DpctlegooUqwUD^JyfSgvVX+>GIv&@8uOz7ow+%fzd!Nm~Wjud8E#5^Ykb0X$P z4vW#oI&gc~O;@P3Ndx(tj)XiO^}#&s?H2p{MQ?T%>Tw^)WJV_a(qy-~N*7_t_+o~T zqFqWF3)TtKRT(8j!bdU6C53*-Bos20vTr~CXIHt_Z2)9#(g0rx>~=3|L3ciYG7r~> z7Ya>}C!>*mHI+at$s-t)Wd3>Qvw&2d;QFEOa)Qg+q`{@@_KWk_-yfr|(+Md?je~hX zLM@3Q9mp^^2BLFmGT4rXotZoE+Bl*D>4!h=1d_E$18L<=?@vdw(Xd~2gMyfcWDHkL zzl@w649Er}{OUKhABxMr}FX%`|FiMXqN7~YmT0l3yA@&Ch@nl&I-(n1C#M-2*B zm&1~9mQbrAa6=v$nI{Hp6ScxECr!??mq0#;h}w>{y8Uh^;H*s=U@NY(=(&DnL+exJ zXUunJ+o4DcC7F^ntYkDR0-^b!hDUjx-<8@;d49(UCTo)hlWwA4+#DAQp4EJEX2Wt( zkfH69{y_%H1~ggI&x_nDwQHounZAQIFTZSS@LzW$U4e9joynv2;edi}F=-WeJNIFf zNt*@^32|wP6HVurFZW5!?9YgpmA-oZ(z+uj{>8@~MF!SJ4YZZZalaV$n@x#3E)MES zxLtW&`Sfkj(3&(PNKns2s3M0IA`=`k{WyP{8;x3PlSdsd3Wt+^)$J%VcDl|Irnqwh z!igwgi=qh%RidQI%d`R*6HI*fHO=)tH<78e$s;lK-_Qzoo!21zWZqzo6XPbJ>Y_mo z$rCaU`k;8w6s=`QaGM+XyqhdCGVOLI4W^aIhO2`&Off}czWD*G7|AL{KneiVV>38QBX 
ziBqU8l%0^9Z1v9ypLt&aj`_lwflt8XCoc}&Mmi}^x9CI zuecJSkwIGxRhdCfidxmIKYYT;17>Z~;9Asibw+%s&;(-)NH;3RFEX`;x-Gd>+7OvF zBxA%;GpNy_2pCzD5yFul7f11@zwQK;wMm0Yt4J=M<)NBSiorr=cM9QviZ%Kas_xvB zWDRJwW;PGvU?KSfP8aDC22oN26K!{3*Tn>?hH}X5sUlbWwWQy?gg{}lu5GeFFM0o!Ng0Cn6%cR`2XG8R5N$@UPq!m(Ldcs zj8ho1tG}T$z>Ep7HfvC>+?mqQINF!|`Rf*UrmCV4O?_x)Jb9#pa%A3k4^U$&8DNfM z7G|Q(sb7S@=_WF;Hfhjl_WR;ep<>myeb#yFVqWl^WHP|W1wl>-Ng@!yAtOVv0H>N> zF`ZQS{fFG_Pg|1)TT3=J`*XV6?WUlXUlu4tQWPc-$x5nLG$<001oR7&V`Z9|4=q=F zW4o0TJk}HfaE@ywJ;j zBSIu~D^=79kxZl1eVK!uB{++;SuoO^KC=|%S?HaVshEsQ`?tK%KX|K?HM2HpAnImA z?)oY9C+&T7-|+!?D$@I$B_=r_9?gV6iq{*5-0d)&WXnD4hT&sv(qOqwqO)l=ovy9y z((Zz|4LxQo2CbhYZxDy6NP~!o3R0@Vm-A5%cT?H5wzj;%e=n>3*AUo495yX-_msexV=vT6wT zaw-C%f97B3NE9&%g+2=i8_@G*5aI);&R^X)sD+PbZPK98FOgoNR@Y&>6HrMyzF_VQ zR2F8Emt?Y;A;*wOQl(jhPYWdk=}Av7&u_V@@L8KY3<%aKno6C3AS^=8#4mvB&Fjj* zgfODDAFZzh!LOLL!ViM&AuIIBcR1N^)+P-g?aS#E>T9*hPCSfbBqf;9%!S}7uM4p* zTueO%0#-0X3$?UYJ0F?Xm{!J- z2(>gqK%~j2Ofoy5gvH^4vtn({fY3?L=>o*gsi$6e?$gI#d3C$LUJHuW0rH#ZtkYeJ z0u;jzc>jf@2-T?OPGcN7#aCTeIQjThf@;Yb))oyY9o1e0RScL}-5WxhQnXi59+$N4 z_}p4bACtKekx$_6Av!?lPEQ|)h+om_hkhRCDu&UQ=p6iCbK~(?n=}Y@M7xA=J{##F zPt!S!8)VBn&_QW~?#70s4k-eTQgT%g`H=0QlbyJa&KH^UCpSgq#zXBiN&NqwWwS?b z%zD(P^N*&n4rEdpM~f2mAwu&wWUdi{H@iWR}9*gOb8XRK$}}z?Kd9nj2Ti+N1$? 
z$%QgP3=kfQ>W!H%JT4HOA5~ol(N03Dn;sin!kDR=;t(RyMgo_i)~M3N1o(`v&>Qo2 z)}+ZUH$`S^(je6Ajf+dy{;CI!Z%$#aYw^fvO5=_+$RWL(Q~QF(1|66}C4M23xC#+c z)r5etKu%0g2w55W z*kw~b?54ll+N8my14M91<)9ObDrw_xsDYpuA4Sqf3ky-kqs3%r1J0NX3XMYD;_}A* zPF`$llLm|SrDke>EHV80C^09M4x2C%_A3LQ>EVJ!6QTI;NjHAUKa}K(#33$xDQjy6 zgyv3O0I^dR8#=0Bo0vgL%))%A#5fiH4UKjc61Gc%+Ez<=}kJ@vB^?);+W~ zX+UZ21W3PX8A*C}2u-UTTNX!IU#e9o{QfYCj`_*wek(C(1nQdaHc zZ#ltdZPDP<&+tWjG8mIVgR|*&=fcq-0ADgD7@_$YhY2|gX6rIt9S4IkE}yH6r6?gyNBIK0JR{YBA8S%Z8(HGqm=mp%;M5>*(sB12pl+r!P5>r%Ii zi^AHP!J%KIWgI(3Hgsm<$fGh!u(t|kIire<1|v5&QWm_rtP+Hv1R6OHK8@*mNXE%O zbh7WPEgD#w)w%+!9ci{UG={{=tT}l70i)lU#jOxeL5my#c7iN=rjW;%$^g!TlU1^{ zX5eUc+7WPePNAvN^atcp?l`#RsHH+Vg`yLsLi$ojCqp04N(in>e01_=JY95~hMGol z(hW7u+M>axc@{@uE6#FCx`&lfB4lzGtv1YBhzJy!c0~^u4HP`fF@1ehNYNRn^|STM z|JhY8o$1c7HEG~!HqcS2Zu=%QeMmmt_z*(T0 ziZrP9-rxOpCrf$AB=P@y*O@ga&4YdM-rs327NjDW>QIi}U`*TkDQ)aYwkNrOB&$#v zppMT-EOF=|%)m2|R%o8NTGx7RbkpKzZPEbLQ_PNB#^IO%ldh?P0VxH|V@>CJLQ|l7 zoz-M&AT58SuM)T>GA0{$Gy9QHN2Al?dv)zsk#9og z1NC8g7b!gOj&rB7M`54yQlRPzzs<{aAniZs1fR7@gHMn0JPKd6C*s4KZynKlL#H>F zu6BlCMX|JiS5zqR&Tuc&kWOQ=`WoZEaC7Ten=}};`o~ciMWCyHdai|fI~S5$fW!d7 zXid*s$gJlW83HAjk7^z-01HjJz30*)kb2 zqY0O0O=v=NwS;)tRApsYcS;7NPPMAbSQFoBH7!yghUavH_}<4XM7^_&b5OQ zF&mayI*iz5=%7>xbTE6|O={BGqyeZ2D@UEhz0ujCgdlvj8zKz`OpS*z&}0nGMV}@j zdZo{wrduk#Objd0VMez>PU0`Sk#(^)x#=YC&3AWsG8(kNJC~8sdlahab3|$_g47KG znmi=}jgVz205g`BZhwZ#3&xY@oa|F;lLq73XR$wIlDsN9Jsg?8#R&2OLTLb35iU1^ zD3Vnc|CEV}3EGEb`s5s*=soYK)6CkaL8wbsM{U&p5SgCN_2cs7;cpmZ$B331d43s@ zLHSwO&oq>fw4fV}S%k8Fy)QXgKWmc)-P(KR_3{2_vb$ld1bH$>e?UuyM_XsKJ*G$z z!+>U(cfbtu>1Gi{QTQGV{ z42__CMa`~iT8HMO=VS$~O&W-6^BGi(yTL*Afmc?FKZ=J@mGmOh_C$kI%^*Y84Rs>& zhHaSmcie~(S(`NA*503^)75<3@17CSz@Aey5>g{c=zt+Iq@78@o1qo7WJU%W8@R}Y zzF%&6)XCymn>6@z8`n{{=V&$??V^c0-t2TEbL0CFnxj5amP`lbe$fvh9yh-xGR7N~ z<~+G&{?iXQfoWlq`2T&aSp#*gQ#w|n30bQ*IxidPv_PU0y+n~aOPJAv4ggJROf*JL zJIQGaB|Ycs#r&KbZE$Oon>H&voPD(+tiV^R`ZRjrPJ|f)^EC>d;&uB&k+Nbo(sC@vs^2&k*-1Kn27`m?O9=7%NpM7<~_7F 
zY4EL05}O(Hl6OOfR>sD5WEW}72xG{Vj3Mlu;;gMz<+`*k<{u+}Cw zfqQc_?v5(xV4%xBhiXn&LrB(5MNH^KM7_pYBZ+|@fESmw6pt@BSvhNy2HfG8Y1swn zBz=S^no=61qcWB$q$QbtUURiN%TM)38jUm!dVEOiCw;$D^P{!TzQjIMyZ&98GLNCSXn&^rl72kb;qz z(NLpMQQ=oqcTj#H8Oq3Z+_93xQe9^dU1aYK-ISHAO&UnL`Qa@0-xM{3vz-fsJ{u1+ zYa$8i6qYmp36!Xg1dt0zK|qbb1=A;KcB7M%WNp%*(e$YcXx7%-wU#kY6<0RyBI9it zZ^Vv+jf4M_DTwwFS9`>D?xNmqZOwqteXJKi?8yAyRxfaq3I}SkG^#=&;5j1205_e( zL@vTU!faVijb>U8a9Pw&6jz^_nBCoo$IvD( z(@7y(jw#HNZHElZ3<;X?WSSw+Ww@I@T-i@w<-Tmm1lA@EI8BYc2yQ;6S)HtNuU*9T z9Ha~yGm0T8?jS|yn68OTk!|MkKu?5@Hr6y}dYq%vW^K)&(hz}!5qnFeJD1Z64eDCE>>rIfs-LX`sf6v#`2F;K7C zxhB8nreDI^q$Nl_CuEB`A!}a-^mF4Jwj53)Bja+aKOQrk$$KI9%=G#ixdSHfNYUBF z%V2HI;PZv|*RhBf@coC|wg3Kh>?f_kQk4sWZ8d~gP^o=U6YkQ33(>Jd$pW3$g!V?r z=Hwh2YSOU}`3-As~u|;^M)x zKtC==fvs$1=})>*TClcw`KVwPrwSA#`hy-w2oX&W4+-(kxQ9n|1qD}=rSOFBP{8r)4G^7x@BgKeuJLYOJRN^EbXSS zcB)V$XHd~!&?VJl-8){^1-!pNOG*5H>~Lp{d2lvezSGmXWWa>uDZUN~w4GEBM_!nqd*Dh(k;jvl48~*>~ zy$h_Z+g0CZgG+hX_<^r&{Py+h`f<AxuGZOqr$3==FTVe!uBX1g@v`&WtuP6K}NAJFArWtbq;?I)|x1x(A^QnWrVAs z=-ZfnI1Tm_;wW+z{f%obb$=D9Bf6zNR7}6Pf19;`?%qB9^uCD>Rg<@YJ__4fje6Th z_!+v+@7G>R+lU{91wT~GxY*`0(zi6L{WWFSib;tGI0sf$V^1W)^)a2l7Rf4ZEvqU? 
zYr~hgjy9S*whomZuflv3miArBoqe&iH(@Ci>A^b+GZzNbg(eTlnny88n=SlKxFiPm zHDu)QUc-FaP8*&2X^D-5z*MYqg*Me5z6X8KI6)TY`+7L1R=qH6OHbBaev z?bMTYm20s_&Y4@LKFeIXP&Ak{KwzL^z9Qsp6+`mIeIP)7*T+j)9ql3WkPDcP!qRGQ zr!{6Kf5PNo9zsbzLPrmN2E-)vlshs{`1=&&3trAJU2p&hdm-uDKkFwCQHOlU$FjNK zm@~0mex}|@+Gpbc^ikN@+FpbsnoZDoUsTp;?-`nBezWpc+6Me6EVuRzd4YauX1zt& zGjz4z_mHc-#``EN_93R-MZut3%s*~=w}V~T$kY2NafoSm8RXM)S$Y3IuGPzQx3acd zaa5fzms5NnoMGeM&saQ%vRJD1LCYu++HyHEv8Z+;X~hUiE<%AFNr|77QytC0eXsH} zz6$qgx!e!A)PL;D#ZC$uhc`^uO`EAXZ61sg2%tkV(pMuC++t#1l}PD^Xo)wFF=5oA z+Y%v4i-Lqj8$`ARmS8t!>6lPnT!U=u|}ez8HW$3d^gl ziAffq7!>*vuvQ#~2jQb;98`|@^lufJX+r{}?`!@8e@HaBCwYuce`vG+57dSBYw z-0TA8qwtAq$=D1)EvC)>mJJ-4I7f&i#_71onXBasMhF!{I7Ao2szgSo5tB(z2e>wx z+rZi?m_zv}EUfm!l{K z$Mz@v9NX%U4|!RyfrnHv58hX8irh51n%GBiS{s8I460Tbi+19_fHgxCWHozV%3f#J z?6&uZv^t{0t$lI~xDiWJ-&6^@(@mDk446n&1Q<3TlE7_UV1rZfbz{%WPOxO{b1(SS z5gmT@NjAN|I$u0fUmNO^tVj7MoMhU@jeTKE){VJmXcF=sW!8BG?W3^J+6wQ<`D`_9 zX4P_kI`Z^RG_@7p0qCQ!yxPG|iyAi;x--wvZ&+LK+(7z=X@O6x>0L9as|O?Y#Rd(v zyg)}8byQ}+f+|KH8?p(4-$)!)C2U7n_#Jb~*Lx86le_9@F4xd5@CH&J3(Wif4S&#f znA#5eCUi#A)9m2e0s*d?b}}QL2CGQy-Z4vWD@1)M2QgK{HbDQfYhV^UcIyk*T=qW2 zR!4Npeo#`vO)j6B=@DLwi)ZTnq~-dpar)TYf6yCS%R)aj=Zk8F6!J<;L!Y5H)FI`> z3o_JYu#dtrYipTj(=#KL-Dl_mYipSoFdv1b)lR{kp-8%zHwMC+bK=^?BcM8h79>_i zQC(pJU~(=cT%D>6Q))^d8Fj7(Ka1E;2BMDUt|IN6!6mGZ!Xkf2`98i3JSu`DRu{Xl zy-yJqrZ=Z|t*?E5h$f#tlFXM^VM|rOsxTEpTid8qp+xl8s@9{y;F1* zaBEqale1ZcA37caH=dfFK2_m3wa1aBEE=kqA!feTycbp*T%W4WDn=);Q58}IrbxXb z(og%b#88KHx#UYPXViCt8(}=k-NFu`=n~q!`1L{SsFMO6loldDTV&|8;Sra^5vZk5 zcYBh1$&b|`_hU*%G2#Cf$5DWzj8j}Jg1im$)Ow5k-nV(dqz<_sQ#OhT8mQ3X zYKG8~rcAp6PYx3_brj)!npAPk09uqMC>}g^`!_%41(Q1DeoXl&rdTX;U_)Zq8X>+~ z2AOGsGKmel9yi_=7psV>gLPMY+ef`%Qit4+sTjo+VH1rV6_%!Ax!YmcTH+90bybJ` zR*LJlI8`;`==9j_Z~p-=nA9QnV=67C&%NPU&nDG+*0c5<>JieERR;;C!_^M?r`AMb z3KYR#ZRQDCLo+KQQ0K${nHyIhE+P|I*1N+`BIcKhG`h!;%ikoz%>rxZD6 z&K0LQ7Iq0)1!nS4y2+cwuCIU+i{~*cx*pSy{+C`bsY5zUFS(NXg*V0zi5AOr+^}MX 
zGBA#{HBp1LaoliTv<>G)&{c@W+l=C^zu^UvI^J$O7fr@S>>{c4e&}y=FL(ikK%_3{Wl&s6Z*n%QH}8fw#*Yck@-&UYG$`^gP1*$J zW`VU=FR|NHNtBXSKe;{O1(rJGWmp@K#!m^8Vz8a0A?JC*xtGAaP2uClIlnS- zLsA%_dezy}{AYgJ3nX>Oy^zKaiKawBnXwN`D)G3?kmItij-o`QJPFE{d`Z5g*L>OwBz4HWkj5V!6@`ch2D_Nl zceuJYG&EDwmO+q&=*QJv(E?T~8;{-o`iU1v>X3UOjlVfEyy)||h>{lf(JpIq9_?kz zg;gY7SJTWxw}MBotxNm^Kkn1&kPgyUoBI{TqY>hpTt!go$yQ;$L5FIvUrBi|6;;Zc zB2U}4soOAMtN9?lsiV1n{%^SW=fCOPU|08hl4ckLS;8sx+|N8mxK*D$e)eacNb@Q& zxnaJoj-#~2xdqu%CDh}an*p5?!>zCZm*t_@ayS>0xhjg%SjKFTE z_)ae;tvQ`fg~>7owZMuwY^$tdY%A&**s4>m$Q&EK&gIZCj2umu`VHO6I+5-w2VkA-_N|j_J`H6|5Ft&N`KYWoFsOpdo zYB92^{agpW>D&_cabKHEPrKPUvP@%)}0MOwppUXYm;5swH=(l*1<)Hq_i=bWr(>Glud2 z--Zc_Wc-oc)IXAV0j&<{Kqn(B4d|J)lUlRtxjB=2oLXamvoZ;)6!Cf4-#KMQZW}C` zIk&r5T)R4o3{jce1m|_%;03Tcqyrp`0EUv)POT*9MA~PfG&XWN^EB<+49Zm(U#@`9`*3%NH;m~@c;E7B2co+8^M_+zj20#hB*!7N8$ zuBIo3c|&@_OE&b;k!de7kmVTayh-akHQ3@&cZ%z#Xu7JvL8PHW3}8A5b%Nu%-PphM z9xtfXAsy1i`PBgba6 z#hl*d!kRC}K(o2qfI;!H3WByP3Ti<`OOj2kp@mpW1EZjzZ|vQC)6WT}4(UL@XuaAs z)1N<=O*pSif`7(t6Uw;A$ip{63weG zAcH`|6{0guF&OS;7_tu&hOUaFOvxmXTuQlZ~OzhICkm%xMko zrp0RIEM??q{pSZV2;OwHxcJs&ig_DjPgPVE)$*8wwn%yE!$nH?KLPszKQAD4NHy4I z?!L(bVCm`dVmDQ1YMypQ7IU*^R5S7fF)@B4uRXZ`_MrLYukx~s)gc|&7q54*#cX19`~gmqDZ)&gsAA)3@>LzpVR_9iESkecI)`z{fF}oOK%s9mt`ew6Pcab7tcYCOE&}9es+14VO2J16l-z zW*l`x?n5dNH0|m31Rbt46^%NZy8rvPds$C)NC!1mt>y!UZ0u=zRaLOR&#zJ=}Y6$hItpj3RT z1+veh#cdAU!f?WhWMqjQR2XNbOAIwo$cPAJ#5rZ6vTsCx#7_lK9a4qZH7zdhfw47) zF`tqtWMP};0ZsS}LUxhS=EXW1&N-bo3P$vIWsU!b7kXJ^bx4Q!^`mQipgI798J*iE zBS#fuQ9x44kMSIBTXH2ib7|1ywITS@=X$}X4(af{`4GOzcaB#;(vrPuJ8KcNCnjqv zYF*U6HvQykZJ&ZGiZLc1sd~BG&VOk zUbY}~TwO#5Yx5Yj%hO*QRr{3uj=Jld6ZE*EOEx(%)e?Qb@p3>>u!4C`)`rJCL*2}D zBQIT3=}rm+OHGO(F@<*7kiYXfFNjGsbRiw$C$|>T-Wbr^ru0A`$Q5~39UXj>c% z=dB=-$+=Ewlp!e{(F8{UL3Z=c`+0V%Lq5zJ!(?a|(pxwgL(5JU zrW#*t8duaQgEVhD4lWs^V;Cq+S?3JGx<%`!;G+&Xj<%jIn&Zjc%Dxx>W~iYn|6P{H5vrX zYH}E24xKk9QxA$LWarWZ3=x!VVII-HujrR#@Wt}fezdRhb3~~_I*4!E4bgqU%G|vX zfn*~LxU#I$2J6YKX*2|_EeQrj4e{CJXa1Ki$Q-vK*k&xCNjlHC%P>n2K1JT>&Nq*K+9ddlj3ejX@ 
zX=7({(2l2rl)A_bawBZ}yy+Ofgq0wpieNS!?s(9<=`LCuhQIc;*Iq_hLUl+7b5AS=P`?Ea?AQ_Lsm0q*9Im1_5#B~%dhzRepXW*(qY_lW!n8J zg&>|Uqgeq>Er#v`$_A;hz!Ijp8M2^0%_fGgOCgXpM48*7$Zx&E%Z62lbP)f@?kn?x zqI+q_N=|fsI=xH%3treD;XRfHA_t0k0z878wo(q6q+xKd3Tp~raZWD@!G$R287D{6 zzki;WAAvfg`w`T|o@!@mVkS9b^@v5%S8{`$mrLi6GLr1!HQ+pci=8px_VxA8|x@Xf$D17ZvHqJ-!X{n%!UC}5j-@j9yUZqYV>x9;P$`v!*f!H zbPzXMyqk%TgLuZD$$v&cLLHH@D5?vnws5~RB=arhg|vf6(or~Kt}=>jM)c30<7FLV zZAb@lqwl!`;}&(fa*d?PY<>?m?uiUcEJqUvlV)90gi=N*czP!Z^H=QV zGS4AswpmOLOz|PH6^4KN0xy`=AsyyDXOhc~ln+wmY-n18QAYlRW6i->@m7n{fObBU zuPlAU$gP}7ANNCwQipUnH(M(k$)J=?Cd=b#H%>23d9g6~A@>O6ap-O-ODgp+1QG(| zV{#!agf^}Jr+$*%L>toK+wu0;FLpqpy9-wpX&1UrYi6~wd$PIAlwq?r@WWqc?vI!OLONf$WLCfGn zQ(C{NAM%sssY5z!Z`pnPY?ezVtNCajo9riw5x(cX%%g%gHY}lzF|81%Ev9%XkqXP` zb@Kjyt9*<Nv8{%O`k&dQGCJV)2 z6>JI$Uo;3&fMW`tj2-q&WH0=u{A8}`kPhcYk~5Bz=E8kG!qtPb@tPOa5CjXr)e1YcoI%J?Fla8H-jSc4ygH=Aym2{R#@v(HI9m=+oiIVwbf`7Z8A?c5 zy9BDc%n3G)DIxGfyQo9-+-;>~?uS024(VXNVYomxVY-5s2=tHc-AZ1`G!WW6X_!q% zl8nj+FHO2aZ3OSTW||=C6ZBX6sdg3GkPhI+!*UbgMJb5N&v?<&@U*JXv7xBWtEo0G&#irbW=i`?ELFjc{y&?Asx;^|Dl=Navh9|y%!?AJDK%jsy>j(RC>0Q zcHmh=EL|DW)`US^1u0c5ACyaVGzaC?8=u_;6wRF&@lwM1vJ7h0A|=IU3J4Lv6z#?w zNx+h(B~vt+RB~>8?=_M2R7-V4hirpTv0fxcvt0jvVwQ|A9&F}?=#YqzBSZ6sIshqZ zgQk>85IRKqFot~&?|^GkKE2=Z!#7libZFnWYaMM8FuNzY=)4+zVIcAVeVyk%PhiFq z`PS$jFo#$%XaOCluIms_+y1ftx4zEHda6SnT+fs0xSCa`b9(J|&{zmiqiA8-300P+ zmZ+~ZB`*unFXBavcAM!;f{|?oOIp_RHGU>;)gc|)4WjURxAwaiQZO|DSyKdZ>-SgS~l4u8$+T zjhFC(moT8_cgG+?j3sA_+k|%qB4`jMI-VmL<3px{A)NAZMs~2V5Wm~cm#PlwAii?= zQWx{3D3?gDYreFS9{X>RME=nc04OpZlIp zd&idU;%;#>Cfh%Uv3-k}pKxLV(p0il^h_x|CPs)|=zum6%q|f@ZtRKl5%pHj3ptn0W}~OC2{K3&X%jOkg`NcyCsISF160cp5#MN#z>QAqZTctuv~ATP z9o84F7rL!h)zYk%dq2#WK|UUeC3WI3CzlH|(t(zVng-D_sNE4?Wy-zc=fhM-b3lf} z<^qW3YoziNGaC#=3jzTIb#m?|V*^rlEE`00V1>Ykv26TXexwW45go3fyR*h+Zwcpo zWT)9?(0ykU_xHy>K4$m0913RHq4Gd2H0ZKbs6&bB2An%8kEG`eY|6d(Lw@+O>W~ia zONI@px-M8hM0yaMY^nECo+3eyu##s+AA)F8|kdjpY}gfa(-TV>>ezje>cC83V!z(sq&+5IWpmMd5O*v>5E07-af 
zF9QXwxf8x56(oGs_amNjKe9oaB@8X-A|sA@mD|C4^WugVHI;*Hmb3G8(tYT2Q1wHgXuK?zpbfYVI_+{@-i=|NkZcsq~hsnY9-5!E9T zsOZMg1HthWjYyKA&@_lhGIq&Wjidz=KV4U3ymcWR#-SIv2P3CYSM`ExIzk~5+$~Cp z1;je}SfM#HtO(;ns(T2U0(D5u_LYSN^nr2HBqL);LMBPXB!b!& zqo$dR&;~J+QH;XZHLDv~lsElcFzS#F<=e;7h;=cfJ){eR!v~qG_%4tdF%(WK9U(z# z2LWy67I%3xNZNi(=y`rv5jHK3E~KAU(*lX(BXeOqQ3HddL?8t9lVsYFc$ z7GBN>9I`P1(!{t{8HO(US7mhlAp|>spbhDe4z>2y+%+?#)J~5sD$w)A{wHR|2sFw? zjG;1Bk^o4i+)ygc5fE#FxI*6`LcxaqvzOYBHrkH$+kr}x)HkJpoy4SK#2A|lq{7dK zv5u4{S_UP977YavUNDl>f9jeb`^t|xq64|{-rTixH=R@bgbH*wg;hAp_n>;mEL~2< zmqE^_f>)*?87D<LebKM;u{+h#9FU;^asfoMA$dzOyG}cq`7P*iFgrOlF;U{; z!#M=OO3o7eh2-kLwY(-S*WJ=abhrlM`5KpfU_uM4=X3f4Izb;~wPGqBbXKA1#87!D zG@{r_ zkRexnjPy=|o)I#^y@=XJ{1gY&Asxg)Y3l-1U-Y?Pw5(|#hwD8Zq8o5MCj||*7t(=!_c|TF0v54$PTOPjttZUvw-bB%N@Ayz zGJ+4n@H1Ml2ySE|n$nal`eGP^P(GmKhsho3fi)bw{eSooZ&HVJ(1(}EPUusbmy6ZG zo;!_L48%}sVvry}F(!a-pLa5K%ULxv{4(Pz4*bVeRy`Vu@ zv?EpJU6n&$!jr3{EtIqreW)2g<>hyXTG~N^U-whwQipU12ma(`gtBv|jJ0p^~P<7JZYU6I%_`eZnt49Np>x|zLgKKX>7FIOAVVIB(9hcKIJ zF?%psuI6X*G8=;pMTW^W9uX8oP$!My-lstqpv8r%3;3lFA)^Qf&zXV!r4TTKdpjV1f3QCX|yrPv%0%^IUANA9k9uLX;|H+N84)5SG zaR~2so>!+>(vR{LAAy$FgEgD@x%gNmIdb+wV`JVTASzL8UhjQS#Uq*=- zBt9wP8^QB~eXJSECO-`qI=|3}-KRGnjlVApXJ9dO^wp6-=4S#g1E+^mqNd z5Y!(M?T>9+^NMH-|OduQinY7?J{UqQ5{;hk;e^CGW3EHCLZM4LBWjnrucM`%N7B(jfUb)NS4)0 zLw(-Q-c^TmFkimTqb>>;OEc@O$o7){5EjHkf(w-aIrul6xrmW8_+`UjMIV4tIEfT3 z8b38}bu@=)c*kxb(iFebxZ|K@)EE{#Y8=R&mo>RC?fe!q4Ty1QA@d>H;`g8T@*S!p zI)K9+x(%?WSxr__m7x)QG95c}sO94&i}Mhyc&2d-495WHK!%RVg#%vhu#HF}?34ZH z{mjOzLprR(yJjC&Q>~aV*0uLjrjDfSf+I>z8p_Fo(Ob5a;8-3AXj97Qn7r801+Yq07yxusQ;9GQzy24G$>Xssu zq)U(asf(*aI^;u7XU|eEI`~%B?{sWa$0$WIMqe^81ur?G31GD&Q5r^vkTZ(f7)a`n4(VVqHiFbTi#3*Oeiz3Eh`fD^hxh*tdou5`06^h^zCR+1a0Drx#|4#h zejL~Yq8}9r-@Htz&DdA$8~o&n>X6zE-f!$lx0s(e>JH{(HU5~R83lu&4Dqn7X&i_F zD(4z^E(V!c`%c%R<^K@p0YZcgRzk@SQwg1l}ZxRf;8WP1Z(q%0bb;ipgo9DlPxhY z@zD0OSGT_NnyC7YvO1zeHFV|Jcr1=PZ0)Zx{Ob+d$IczwnZgtXN*szvG~`$;XrG9= znzo@{%7iF9oZbxQJN(=&>W~iOH(ublc~qU#GDbfK1#u|Z`^h)FxOiVBV4xO6V_XQ> 
zNDqs0LBhZ}_a!+34>UhpQ60?z8m8v20@7?s$zN#>kaZKyg^*-n415}4}2!|ryUW5mExzuZsl|g2h_Xv~wa5?xO^SU8@#XGfOIHHMp zTUw>;-}gh_REKmhhopT3=5nqy_+c7O+G$-Il?jq-MU0Fq)>M47lXhKxk_&G>I z_qBdfS9M4Scqmct2Yi3LR1k!`Dv<_a9s&u00?#;@n=zwj$a*mdlEutsk-pgkJ@T`x z>X2%b;$ReweWQp6;%GxM+JuNkEesxHPa-bpY zTg%1dbUs~5a(rxhjDtCxm|9{x6rF;H6bURLhO(|8~Ul{sY5!P1E=!< zPA6J2ntelF%>sr989#$YS0JWSHU*;RXhcD66)KBVPWrR=d44D?>W~iaFf6biFU8A2 zv&_y;j%{X|Na$K~=Y+V%!WhJnUc{4HUY@)~szwC&BE;@xk^j$^c)3#5Asy(!GG{+% zY@IRAVFVxVV}3P;n7SAlQ;CxEhG~GLO}IpmXk({BIfBap9~lJzBj0 zZ?5(h08A1=hDQWB{6P~dXNY$iKp z^HJeIGP?g$Ch0{8oF<@}Nya*&-vO5l0VVoH35?gMj%ze9@J@kXg>D4*jBL$+?1z4Y z{;e*gLq3oqH<6!Ad5O_Jp~!PSUgBt|UDDB|;!Z;zmK=p@CRsYZM~+sMOi*fNJUy_L zt*88aed>@7;vfw*J++qA)U5WFIT8l!Gy3Fc(Qt-_Je2W=XKJNOgmW!I*t|^e3-R-| zR7Z1AhH;ev%J3EL{{j(3A|1t_2x$lA#!%G>;J?uX!nlw$7SpW=mZL4I-@4=F3sgsR z$c9(t2D1J=Nm&3r9eVF(LEG}M&kOVBI4*U#h@q{AXrc%~anXxlCxVeJW~iY``1N}t7xTWUM*aWv6&;0Gi@vlP>&z8DY#Hbw~LB!)`S%wqTG)m>X9S? ztUEbC+kcXuJ@vBM>W~iqFvNTnKeOnoDa+lOGfcM(vQeXVKWPo~yx2@(Zfz*v#ZpIr z6-91~3aDgwh}ZNEO~>Ei1*&9Hs*n!qpgph!)htfV`|T_jjAQJM!Nq*Q;KqbBcE%eM zi8p$;QA7C)xhygs>L`$=y4*gq|HK=;z*UEIaNoCgXP@na>xN?Hvwq``p^r**7qATE z)S_L0lM(eW#wSolWBQkggAy_Z#dbUV`>*^6gQ`P1^sic@cQf1783l#=PS0-drNc}w zW1yJAFhq#$4fd8ix&vNgIEhdn6U8hf7KFR%qqeS&=0FYJ1iL_Kt{*0}AXy84KB|Qw z#S9v6(WpS!B1ZcQEgtyiaU~;7w|@AV7~L?THloA$j?snd>+X0`pt%zgBRXt@KF=j=HbcS~ZtUDl7{G#{#&9bhJv5woA4Jpz zC{+d&@KOpROrnL~8KI0Bmm!(^c>=k3>d;$dn-I>g_;^TX>88pA*#{sWU!vl617 zG*l=Wv@*;@_qoD$_5Q?m;_sR1(XsrU<>6MKVWg+Pv$M!?p<bJJZ#ZGqTsyn@7{t&Hf-(hjl-fVX|p^Cmf&G_|d~4vO8gH z1JHxu9!=$n2@Sg(XLcIYjkp#>aV>;{2#R&DJ?%+MLLJfp9rD6$pfXT8#|5XIxUIlW z7=s%^9Acl#X@zU@Xgt`8QQhzJA`UmUsN$zQN1+r4H%f4wcS*aIO1|tgchi z5S6$w&}9^qC8JzeKSG^BiHpx#NUG8iMxcPl4!!%RvgzQ*{Ip=yAsy(UY`+gQ0};!~ zbhL#S)6k1DbYJ*9;adbz0xt>+3f?@p98?TOgdM`p7U#?Tv=G%H9nOJfy&tFSXp45( z(i0pokH?l6cLPJ=kZ}Q2kh8X?&n9Du5rZ(eY;NGDbMn;`_tE7G510|AA7D;4!IJL)uj&O>I(J5`CW^dZ#+1gE*Fq%pAnY zES$`u{;0zAer!?EbY@zhlBNuFHzotb>|RKJ98hUWD-}Op5==YI^e_FKQ|gco@#tN2 
zf!L~7oX$_E&Wyor!iaJ=vgHjA0*VxrDCnlrh=Z|={3R-h@N{6*OYZfvemHOHkPhzX zv3vpd;MqhUgVrL1&IBHu*lY3lA(vt$MQshqwWh?X#O6~!=x13IZAgc67)RXxR_$ui zMP1(ERF<=Od447(sIjHRH&Hy$$rBlRY0d5(%6)5gNEM;qE6Pha6l&q*87 zL4JHBtv)>+7ePj|g354MQZnQ_HFPqto{4zTLhGPmg^5yu)Kg`d=Wl-440(5UtPbhm z4TQk$6`ju%i7~WO1Z8LigqW2Cv`v}l5jA0iyh~vFb6NZ&!S>Bt%9v&9|5oX zsFz28I-~A+v|NX^{yNkjv#N;sbSumQdLA@7$;1IXY-I76(`gpqvM4Wk*(uA z{-l=$)`sN$A6}U84gA+_#Nv8ZjlOma)x|{Q&DplR);oi2L)kK{#=7!mhYKrK+HK!Y z_HziUL%Jm$2$MSi_BAT$+l_v#A=bPlqt8W+#7>5=XNht&yfezeQHQ<^Em;&WBbz;Z z*iWXf4(VVHW&eFJ52Z*5vZ9_WV*CvArxe4H5n}7Ym4vG!f?u%EP$sdT=i7a)r81-j zWbf*7pw>vbN0NY&4!KX1Fs{|m48>6j5&`rU=orX+$`LGQ(3ziFqdKGmIjBNyujKNG zB9mHV1%U!)VWCC9l`K%9Kx2fgkNHZpqeZEKUUMwsd;<3S{jkf_AsyJU2daQQC^0O0 zBFI0{B!`t=L-(Lw9KqaXx*LLImnR`6ph5t7?DjAF$sw$Si6*3SJtb3^bmyno-5(fv zu5qe`BayW&9Br~C5_>TwCQ*Vumx=q7@b5#bHTa_cz{{1Q4(Y&-J=SDBp$Sy8)pVSt zly$4%kdjD_an z4129nZbS*!huo(O_)my1MJw;|1dSZDRGPX-TBhB3uiB0LB|nDn>W~ieSlvO^_M!S4 zC0-!~yrzU+7SB_0D8QW7b9@!K-64oh{Aj7(`MgDoq_SDv%_Q>`UnIP~?N^Q;O0-j_W$i;oKfwU+^prPYr`>!;_i2y$c#ImKs555>TJc3fN~G}|P^Fkut4Z~k+T$hG3Pt59g-i~fVg!*Gf4mz^ltfl0O`KZXkF!Pq68iSi7KLJs1 zmTk!UKU(+^RaA#{NQZL6Wu!BRT8D)h+7zi6ESq@A(3XIj7NFaYQ3|C|+?zw@7kRk0 z)<^#K_xQO<)gccoy#%7CW?ZVBTSng6M*Mkp7?_ zLl|{PhwSxRXQiVP-lN*PfcQFg6Y?l&8$>le$CI9d+$A-&Tu`&;hvs;#5{kD)1-PT5$gbKj*4Cq{BTFOm^UQCi2XXE({b(-W!MKhWIMGfMzz@ zZm_JvgdrJ>MsYXfmK5a;ggg0FW)GpC^ix4lhjf^Sg2@idsIvBIrNeuA>}sL#CGtJ= zy1O`ooDw3qfyh6u#^~nZ87BhL7#-MW##i}yN2)_QxPwmk4!BRvk58pr%KJ;GgUGWY zAA?%@`ROPdI!z-SDrh~RsRC<(W#-(1SVnO$5OyrtyLD!jon86)qSYZC?159Z1NN>N zjPYfrD_HU3ijntc6dtZO?uV$WAeWujIRx1Pa(iOGW~idPzu}una3F$bX83# zgNcgi*z%$d#CME>75lb@(#{kO9TN(Zyjy6DGnGpwMYUJ3{--Biu&P5ktQ&99&CJQ= z_vaMvtu6EzN;K|SHA71j!wUGY6$I{*fySmSndd|toufJ8?+VJ_^uvKwhjb`M&nnBd z>ScxT&66q+OqQ8(>UlPY+@+jXG4V!;w}n*2#X}XJ-XbQ6$mLK}dXSevf{S;u-AvIq%Qg!`E(!(t=Laz;apshh6J#58P3h83r_L%m{ zYQykBQH$?hw)kK7Lyb^}bg)N{?Ioj0hh@?`*jJp|Fe#bBhW8A!r!7Qjk$xOSZnV+R zdF7QMDE}8fbtiR5hw=qOb!K{BM0@u>=%Gz=%ZO+jHkcUh!tdo-L@L2>0w*!#q%4ce 
zjIYwigI*oY;dsSW9Ga&*QdbCFGPP;aoc}_vgU1~frpV5q9Kk4@TBW)>zR=|AQ1$$mEH-Z!4@f&6qBo#srigaTyYwNEZ-oy!Zz#pG3ry6mfnO!8ssF<*BN5Xdy(B57~6U zkN>qgqyxEGPP3;-zfA??^mKeymC6}XWHA23#Ym3HhV~eJ7fyeOkErfJ!9fYi4#a%G zPp?xQ(xDsXs<)Q#tnJ43XAKvFf(o(?9c>hRQ49tdc(zN)uwZ*aA7k3b9x%7x$k)P~L9nyh&*>Le)9<Z=%)i?rD=fg0M`4t??yQ&JsT+7czu8!vLY_y9m94&Fv>{t7(TpFZu*tzk<)=RUSMF_fZ( z(!$B4$dl)Aiy^=Vd}|a77-}gp6g}&-w+~qP$)VLD9ok{)`N|aNMH+)62DRzs)T+$c z+4)2D9vrYyU{BCqWi%AE4dDVo$Tc+N;f{-UMhi*RP6HqNX}?fPIuw%kKa4vG>wpie z&E3H7axb=1RKCX+xTd+nd;?aiNghLNA}8hdoUq)9_(kBy9-|-=*b4Q1Kc|m6q#7sP z2Y`+;x;nmDPN4v)M+hVwK(rw!%|=X3(ytfA))w!8+LGD+sGmx_I;6w7!Lr!(*;3pY zTSyr4l)5 zau1Y^ZM`!0SCb3x7k<5{Y`J!9y*Yi!0r91RxxqXS*Dk{oRR)a;VgE`rN$4gz1L;q_ z-pda_9n$>(HVT!y*8Aeh?Q3{rSeXyVn3iBLi&m1@7PQzoA|iuj0a`PmlFZI;3uI&!%^?)1`d%$^ugvb}IC+S_lT7F{%O zBVoo19tm8S$U)I7kNnquYAouI4*4)quoL;FyrekIF#!ep1GY6v#%QP>Tzjl=m>HRw&)wx4&rI;8sv9C$mCX|ByK<+dI>%@F5cRG>xu z1xF{UX~?DXZ)oQjFVT2Hnvj7HlUZKjm!9zQd8M0EHa`&2u2>vFG>ga zb|4*uAG)D08dKt@8Xz~8sqD^unIBCkbx4QwKyJM}Z^Yh8bmfn$* zXoPi7y*pyGfaxE|+cLq(nZ(Q+g~p`ANW3wjZIph|4@FiT@|^d7`iajzas1Vv8K$0g zuJR7-PG$eVq-rT%EYvs}5)Yakuv%(3AW(vZ_stDu;yf_Wqa`1C+z%B(9nvlP1_NYQ zs!2jUJ)M}vf@_H(KkJ5Se>b+<`I_a;ggI|QEjbK1v82@1@P;P)LQ$830i8SBC;o_^ z5{NpagI$h4F;?fN9MNkZ=G8H5%wq%{7ZERisRYWWhGo>~Bg{Q>Dj_T=5Exi_{UF$$nfm%#F88w&lz8Wh3iSZ>G%2KD2yZOPDyf&CWJ_UDC;&I;3rXdgimXqd4A*$63F=Zuk(qKxsW)KSDUkNj*DI;uRH-ptf zk~Vf9KhIA&L>W~iSVEKM!J$2KENgR@7 z@A6ZFSBG>E2U_T-(<> zuYWMBBRY74h{#oVmKJ(4JDXIqv*S@i80sHvMEsGvu4IEksi%|KmWXW7>O|2!frW+H zg6#~w*w3z1hjcJE-(Ht?rK`^Y)3yoZ8D@>HMmZYj6=71#7RBeAjF!FtKLE@_Xan)~ zL`t|y%MAS#IZIraGj9`p%7-&km@I`UDC0-XP^652f+A+#a;! zD+WOk?{5ma))CEk9|x^EnnN=@E-#_c9J7={a#RPHa->h&Asd##az&;|8h3OJs1(p| z!I@T$*?ZRn*B`U$hz{Iv%w7i9E91IbGL$}CGSFx4R)%;Ax>113Ys4K~sW5u5_0EsR|{T+ETVM#c#;6r!tev$IgA;9>Cg z4Q7rG=|rRa7t(iSsst-(6ET?{BBd)5h>+uWYBGvIs$`k?-|eS>fW?e1qys&Wz;=L! 
zLo%_-A*;Ew4H--0d5yq!oo6yv=E>$o~G9en@}nkQ%h^ zuu__5)8C`%*CcqxWhH_sv>iB>(ecXhS3_?AMVO{#hELFb&6~WOTk4Pw?dZ9+suo8~ z-6Ng>yJRu8t^vZxbZlFm*SKXm4RC5L+CKs5%0>+$b~wj!#s@WFATi% zCG;;kN;9!=-Tx|O)G0}*9U$V)gelJ#WV&!YN6sFkDVk&yt|7`m3h|LFsiQeaLm^=s zk|twH(O4s!Kn4-76MDB9v!u)qv|$0&9yM!POAv7_!{FABToYlRF{vXugahAt2SS@R zZU6Zsm_*Se7~8G|y7aW=d8=Y^L(!UE3mHZLV>34ur4q)#6|<;Sy09rBvUlX2t-e*JVhEU(cMoRa5-i)t`lB+cOAG`g+C%vq>I;2BBq?6mbx-U^||M?_jzmIPAAm)q; zP*|&;KY>Tfy=jT{ghrAx}bo?8) zViFbFiwIef%1FV7!=3aRF_4H$E|j|+O-9-36F>Tz>X6q1o{a++|D+CEa{44S?|*9R zPz+HDMyDH@EKCBJ>Lz2woTRkizt)fO{ASqEkk~%27?AC z`g~aNbO@H0n6g+A?A!msPp?58(g7bjU`ul{6@Oo0{4T{Peveg0NEeT{2E(lo5m6mr zzMu@@*(0~e@RXrblyG%rypVmoHPq1@o}mMF1&`*T0j-B&e2SJd!r)oPcGeV!6&{J~ z&l+_CYU)`f7t*aCyCytWn$<>h@P;vptMKeA!%C#0bT&V$jx3o#AwxjP5~CbZz=fQL z+F(r;AmG;&1xUf@{xMkqk-~nRpYT)7QipV?zjfU-c_`EqnN4SY>SBC*ED2*CpJp}l zQ7FR1jm(bG35|Jdi3$QAItO-ci3|Pz$UA`}z|{>6QN-dNNMlqr zLInM*VuA*7pcH2?&N7o`)ES_8GA)R9Fcvtl0Smk1&;5|7)FB=C=MD$n-IKjHF>}cf z#Y4z_MVfI?QgEtIs@EP@m9Ql^`s79yCsZpeuMYXVnqK0h0 zquq&5b;+fPCPILl8|2eGvj5!E3bvW?XKVtX#?Zi0?_g?`HcYETThQiSk; z#l@CG@`m}umWANY7$%iRfyr}J@O{zGl%hJM!#B__)|>R9?R?SAMt_qTBE#G%JYl5q zQ5=mEQrVPN5MO91jDA~0{ab$QeAOXk{=e+U#M149bSKow`DtC@p~c}pws|R~3CRSJ z>MW%<)NqSXK1Ztt_wfoUE$&lfxRw1ZkH6-cw_*QqQ-^emI}8#Ji%YIjF{&|2S7dyV z9;|`1CvQ$HE{ycOX;R4u8X1$bFdp!HYc=y95bxfq(~ zP$|%B;3DLcX&B;1LntOsZYc>PBxT>*w|_G9vTo{-4%{$qdkNeLZT17AxV)XL@ot!4 z9nLHmZ)z%i+>cbXyMj&@Mc|^a?e(AYvnLU()rEA}b{wdH+%Tja`{vN_6YC9WTp&2W z1Cc(RjFVJQe_)p4oF1c75ZIIEBYrlfI^_7q6ha8zDr4YiHZ%N-#VN{ zFmV-~D~!?&+{4~UCh;RJp$_T54f6+=HYOhTlX_vQI|ulAkk#-@rkM^|3pE^O-P(YP z5A|p``f15Kf=;993(|ku&+@55?nn4AR__o5Xr@GLNT#3=;X{3aFAuR}2ul@BwgMr( zxC-qD`0aiQ8|sh_;q%tlpvWrjedjT3ApIdaWYku!h2V$ro3kcOiFGr7wcO4unMkdI@U~V$#og<1#mw@s^nxgjJD-nF|nwa|Ay3|H= zm^VBtq1Lq3fEi`wKR)}kyLEz{xQ4yyYPVm4Dk~l7!jP5~TDfmx) zzza%sNQZJ5ogGjfS4%t9j^vW)FuS)Px^bfvx`d^JSD!FO%HSwAOqCe*P`MBlYNVG? 
z;0kKSg=9(p`)~3CRvXfR-8f32y>gi4O0C}lL%L`9#EBU6a;oIZuKB>0ja zVzmozqVqeTabw=l1=3Mh1e5ZWQP zrqIBxSA^wK}B3_o_{NOQ}uYJ4R(*ntL0s)?DWKp8J{SNQ(Q} z<7a>7iK;Rw)5w{1vZXYQ;DKn7vs+2AIXuM>ldHitwtmb{@lPGnA$!9nvhy>hP>i&0 z8+ooyrpx8|Xs?>2#6S{6&jPWT7L^j1MTo~E?u9iLDGKZpzQ6uS_clLUQytO)eA6bt zyL@@*cvg!A=IRVQ^>*^B8_Q#xh*6N;T0^R|)WBp;x2bE&mZgYW@MA(0N7`Wnsbu{E+Ve=T_yr3?^x7DEygSAX3PC_X%Ip|R& zDSZ^QRz=`n7U)%? zw;3)3++nGwF26dO`^SIG{o~)9^jGg>*js_9MX=l1Taqf;D@qo+M0<~CNkpmyly8Lpou->WU_JhZtXNi7HA7o08H96U~(mEsDM|Q zL5E1B7jUC$QGo4DA!0wBVRcA{?oE5p^$C<+HxS<6|1t5_LbNBP12&$RBhS zhD|Nxp)hZmhV3MAy5)zv%?{E<9A7`pp~vht-%?AVFikLFLNv7F4n}GqK&A#R4es#) zLh7QR;K>tD9npdNhGF>*Z$~WNa&jX&pO_3)NT63Wg(}htF;u2M8qn|NkL?-$89y&e zbx4Qj#cMov>rKvA(~)Nv(riM`L*kmg1$CX!2$yo{wjjI-0}s z+Fe*w=LSa}t%whz{Fpc4AwZ zle6h+v~>-GHfOA-qNr$SQ6xF<|95nhO0m~Kjzy$-sr$>TIk!2{XZ_?7TVeNw9L48e z${R=iR;n&HSO^P9^c^sTX=CZgbl9FU!$JcMP9eo%({nAp@N6$@rw-|`y=c95cwu+b zqYXIQH<-|xaHVA<^3RW{5tZh;;4OlR1t%jNLJ`&Jm+IqwirLVObRiv_SFYh8&w*o8 zeGXykpm%ahn3SuDb_>lGhHxlpkTr$WN#WcvibNBe85CQk|C@f+NgdJw8?vbBa=x0M zPNguq|0YIytcFy_9B32DI4rzR)MNO46NzmiWXTjg&(eATe9jMjR~^zJdfs}eX1KNN zO>Uvp(-cDZKY_N2Q#Tb1F3dCMJH4b#*ic;u|l4P@U&8sN5EXh$`el z#$|LhqZL#HDQbo^6pv{$*m1>w?kAa0hjf@;z7x}f`!`l+d-GXDr&AoCnDvK>gM9+_ z+wjPEETSGpdx%55ra%*0(X7w^Q!nSYI;4X&oC~r@H{q<_89Ty7lMt_ToPQ$BoGDEA z1SI(3>__BoF)4$)BDLwqZvS0Bk{;@idl1>Z_h9cD)kzASnEDX1+7;UUDPl|TiSgt@ z4;Oe|ma8-6Kd{_%*dek5 zF*&Rp6$?L;PwJ3v-A2G2I5ns#RWS|~HI7O!vvy4QhGnq|9yfecMT;>nZ0HWO!W%l6sZ9gq> zbw~&7`4=~Fjdsx(7UtchCMA_X092w@nqGuEqlaI$k3w6Lw}i;xlT`nDU|G!^ehq0NOtIN}f z;<#DtJ=3ugL&H!Ga4JTydnAwr9uX0K4hqYd6 z@mK%27i8*?4%s@}U9Vd`T{OpIsL1?mBb2|^{2K`~@#JDWfT1~t%^+z*Ij{Pb_AeiM zL8T7qP(8k5y_R$rCwC?%raB$vlR#UBb=L|Cp#@=D5HnQ9$A;7(4!fdFm%u}}&&a=W z%L_bpNC$8II$keYTQdc5I<{&EP*ps<8pO9C0%bh28Wu0Zu8v0wwm_#=BzR^Q?U zmO7+^wS9*4u&4;0TH@~h3R;xmoq@y{+SoL+Ly9($A!pW~iITekKjS*@r!Y@U=f^UwCbo@4a1YB&d&JR<~NcL6k5O7C=wd6pqFhS3khfsNIY z`?#MOBy~uK@?~2nkNZX3o0vohXM+Y)7i2ssRLdxCUNSq4x0VRU;>_4F-_O$d=me>w zIXrLJhews2AeYTRPFM4oMxdYzD1st%jt3=*&v4Tisiyg#*ll_1!`H;ucZ1Xs9lkg2 
z$0twD>PW8c{ofrLXGw_A6}cB&F49SnV4%&A(limw^6q4k)9%RUKH&wSI;2DR*8K>X ztHOP0II7TFa-`%5%tm-ZYYBvq%N=wgWiYQN@HJN7F!+C!Q8&` z+=qA55_?Ar!lISSMP6Z~89)JmUPDos(huxq`YO zd=$idaLAybhy+y>OWQmp!$hSI?kJOie39}-`2LxfO{xxgZl35zs9(Q#o4R!zyBJV8 z5o=qxgp6JRhvs^~@E=)BgZ>HRJNO;&@$3S= z*A5O&OV%Q@fyfq7WbkqSsiQem`%ll6;r;)v#sik#q4?YKfM}3j+_;t{V%X=Il8)y@^5>qhhF!V+c_r+<-9yaq&XzVp#mbo3?Uk4H}nAo-~aYzFZk3U9lq_X?PC4R?2diVkLBJ_Muet`gJM{) zMpec1%aLf?5THCc8bKbLs^iKSo{1gYagv`2*2_*$=N`m|FN2C6hhqYh+kP z=Jc>&5+J)8RcPT*7KCpktM^e)^VQmr4&L?=da-&mfu?n}vW2w$5F=&;c}Jw8%ZL~Z z9-xfocD&(bbHb-%&;Vil(pEly-A{jC9nwMEE;ZdbSV5yMO_2gOI%sn;1Stc7P;U|a z2|6f1bn<8)$ECFEKJ?-DJlhWqQXSL*+fLU6*Mzb0ZZ(!_b5?TSp^lbhWkJU-;sAx6 zg=Y};PZaH`I<<^m+g{{5{k-YbA@{D?{A@LCMysB5iP1A?Xl{_C&z zf>RyV;oN%pY%J@+7A6p-YFlyeqXAFWCqfNaJ2Kma0uwXVqDzaW~iK z+egoLR@EIbA1H@w&jP;`)VAsx`I!r;b2 z9&T$gzuF-w5=Je(F)E&P`gj|#fhqAO(3=fSPV~=g1LC9a@&ZsD(gEBmENuXM=oVms z>newg$iv`-T=P=kbHqy$l>p=ib=O)d$mjjcC8|R@yjv}$4ZMn4Hd-mn?P0}P z=RieT3Sdl>((x?euF+3oqMjtjew$zP(|=cobO^VKqMHaeifChbskLf^+)V)q>ERmc zGt3FH95T8Vst6UQq>PzUlrQt{C%kN2bx6fRDu@)6&frS46wa7*mPR~cVblX73^$=V zLXB>ysG+JQ2p50X3qo~Bhj5FUwXu-8=OC)e80Nx;hCBg3wMBLq&UZzbSL`MND!}M= z)SXR${_|IO0jUn@K#m=-haZLw8a^@NF>qm+wTJjfO_WzW)<^VrG5?`Wn3OD}tm6MX z@d8jC(gEBm!)&ag=84D=Dzl57f#O5D1qBW3R7Sjk$>b74`XJK-}F8oRZY^bO)Zb3Fh{If~x{hB%ftXjwS~QUN&9T$zUT=*z(ZLjopFw{7hGLns{(miM8g`%zKu=O{__8G&86#`DUmj$gE$ld zFCeb0HSrj%f>F6FDq*O?F5p#Pp>$0}uXsE-7$FL6$H8f*!x4>tG$xbnNW0;*3R5HvDy>{uTKCKSw z)^fPDx8WZC^5DP2E(@bj9U>8t%l{(b%=dAW zsG~U~LwjV6M0Jj2yi1uh$?H-irIMOlA9-t92=rMQ+fR4_Fq+nTL;kQI%{p~R2W?3D zH=vo>Y${EXi9rl=Ha{DkSil&D0%y~}cZ^#)C9vm^+GW&^n+{0_ab9U^*!}W#P5hL{ z)FB}fD5p)5lj<88cSr1vi;lFIp{}0Y z`8B~c_i6vdsY5!PLj!$dF^?A2+3{pnEtm5#c+~nbdXo@0A?YxlTXRz415h$&1G^eq zj+lXKhOA{7pYr1msSY`|R~Pe|(GxDciDLL{)#chjB3~ic-CQ^b2Nt;BX;>{>Wg$5z> zZUXYpoOl7L4(UJ+#Q62HSxVV{Ee)R52q5J$4#kU(S9e&WHjm16PEQtlKNgdb?A((v zt2&y)vyq=%Y{MRp>Y8}^r<=PD(2cr+?&D*if$@c`%JI}?+$*CRM$J8H8Fxd^L42}N z^+KOk_T~SZdD)lhkZv7^_Sltu*%KZg>Ry!Rstn)MaSk5|7fPN(HTMSIRc%FU1xf)A 
zz+TTa_M{G_4e8)+ydth_Tla)-x3&*;kI)cA5sSkV{sbyt(6TxTWH1o&%%E{be$bSRfmq(lZ}{=J zSBG?Phw9P-K>>#F`(oL2g+%t=G+eY$>HCF{$3;0NtQpo}o&(zwdc1{Uu2FncYpxC- zX}CI?gEVw=w;`zxS_-%5trTEkAex5;MlEzx*tOg(5o&bUq~gul>(Tw&YeMWZICVq^ zacJc3f_OF?r7iJxg48L*tJZqOVQq{NJ%=hrPjmwVj1(~Ki0o*`?1#MIQ-^f;hN8?4 zeD+4!yEnx}2I)}*!0AxuoHVo=Ff>L#3(gPvUC-_`;v?vR~C9 z-Lk&m0x4qdNPevLC#WF=f3_j)7s@V-YbNv_L}fat5z56Ef{P+CK=N~Rs-rm^Yr5bC z4%JR9&@D$68yVcV=1Gq#Rhm;Q+#a*x%?szqmw zQNEeu3o5$0x{wal`Whcli3AOeq0?zK zIi8pAB1ZEKO?u}?z3fAENC#@2PY$5wGbCtM zt0_ZmjTy}+4R6;Xf<{f3oLsAoNxBee;m)o~Y?j5Js^wpH(f-a$z2H-abokb8iR>-LSx8a)0>ef*w5^*{#7ry)FByxf_ms zV#pWAh+dHXQ(Nf}Wd#TG-JJOdL6r!CU=xaNi$(Rv{m^mMAswnUH)U9@Wp#2kGt0@5 zLDi%}@pud(4;jivh#*xZ;;}QgO50nJQyq| z>0A6jREKmB*UXmnM=8J9TWg3QiBSTB)`HAGJeHcGCS+84(oD87(1y1Q#T5|{UDv^Q z|G)3%O?FP;qi(|Bv{;NmUr#??Q(g-;Z9^exStTJK@KF05q69joFWCy z;Y}yph-IEiX7-`!uXwsB)IlA}8&_)!H!aF`N`H&;$wYoPSycA~>5Ze0?*)sVXvQ=! ziY`V>Yj8EheMQ{CxinHJFEx(f=o8yA%HQ*IimF38sO$NS%e!?A)p2+Z07dPV$=NuA zLre>$prImm6tG0Fdr6@6;N;67QegAZm;0eps6#rS+xfOzQx1jYp@oEAfDa6VeaO}X zIV$vd4!=KU{#2-|86{4VZVD7~9sbP!J{&z?*0JL_O4^VP-%ANCrhQ?%{>_6#8+gBaZL0k7YFGEB4i_G-AotXP((M*y3 z9Xog!$fvoHgiT2FLhBGmLQM;JNiHURF>Y(ZO8z{Pw^+JAQzR z>TG@_jlWT>By4Sf#Q=*)2FF-YQ^NMgFAK^O6(~kjpo>GW5Ul^q3s!Z+@vrgT^wikX zbsQe@xd!_OydNrF^`f?&$8=D5F&5Btsj0OA^U}6H_&GlbsG~VFYxdG*G>=G5 zvnJYIL3cT+Su9t3?|B$YrsG4ytDb!blK=;T2-_kgL8Mq{7$1T**;@X@&--yhSBG>< zxNZzx0lA~TJBC!8+$gLu<&a}z#E6u^_+9uDyc0_i?xtXm>Y4rYe$EedLLKtJX1%EL zj=}2^cp&)Ln=r?mg|V6xT@ZZIDDE=0$iyGhx3pTUlKS89lgFq-I(R$s7&%#PUSw=H z#_$Q<6*0O7RXlMnt|`!?$`IEW>d+vISvUl`OKF8YcKfIN=+CM{I($2xPY$0=)W*;; z*v3FKnheYhqd1Gt1YB21Omku1Q1s0UELdY(oB35gL#OJH4&7S8;L2xc)3h;cWdYLz zDbEb80*qJ#x+x(BJ8_UfMaAX`;vG_)r7g((DL-kNI^=aY&ujx5{mPU%OC!K)EV zl!}?qSNQn2olhOrvcMaOY6Fh z{tK@LaZHvE@JBs~c56dAY_T!V`+3h0!7+9GQ!0;aH7fD#)njtXHNL+w3cLTj3 zVuF)yXkZI7XMWNcbw~$xM`6Z--LC(R0maqW~iTnxTE=yBywvV*oR3zLx+q zNE<#yACEzGGzVwk_!O-@K>wkiYeF5;!Q0NS9XypC`_o=Xe>=T)@X*set!_MZcC`1EOWsMINe2** z?*idf7-t6gla}d2oJXMRV)sxngT<%zp(mg9m45UR)j<#K&TclpM{i;bAXLW=y*(!6 
z7}zKn{+h>1U%ysIbhx%FX5*ye5@JD9L0)P|$Y?DzRN_-q zlyEu|;UOCFIVNIaM(pE|R7cyp4J{f~#f%f{Y3|Lw&F$&Y$pyFfRzr#a1}9Z_Xq{xt zbfue$I<&ZU;sF46iA#?Pyr&YUI+|OsZ8pIG=@D7A?TdQ@BBR7`v-U1tgG2^uq)pN> zwvgtgDv+VWBv%Y^FdvNZF0K*j_A~{qj^-9`yI;GBjQ9WhExP}oyn6w*ZAX{GPI z=Ti0YUt`QMzVUtk@Be>=tYhz1-K^uS!}Wojeuwx@NG77=GS6On1+bw<)LpljtfWFl z%DINaIpZuYb10sm*^nJAE#_8-ZksM&Ud+ux2E13uYJUDTuE7qt2rN%MH+<@w*G~=K z^v!29^LvN~D{njfJ+FotNo2!Q1Huz>87h*BFeq$BRXg zFNxGBW8kaLR#f2HYS4t+@?vGq*JSg0qwG5@QgcJ_Q|sq4&UPJ?k)dys&_tIMH&@CmhCL{HGT&mYVm^meToaG?XO7Caie-mPKgrjY z`$^88s_OHn_I@T)^%m@;V@!xL*~Ea&xWO@YD+Ujo^n00OB0FTlpL_3fL)?Au#~C>H zp~rwSnx^g}t4r#(D&^V(_+jRuu*C}9`?=8)!rXfw!e7OE-~0dVO$Y#9Blst#ZEn3S zq0YVg1NEi5-#_j>)YxR{MVjn(l(BFn@)d*0OLrh?x`vsXiS-&cH+J=JEb~=ohfL^m z@BS<3-TgW6vB<}Bc7!m_{1EGrms(77c+jZvaxlV5l&5kJo=xh@F)iGQodG=9wO=f9XsIUz2G6Yt9&? zIt2@^35$#A+(odU&d8dDxG*_hN+R+?{*AE#zN5?r$PSr)fz0ZghVzc6+^CJiT>GW2 ziOo~t)t#2|n>r($GOVk?U>PKnLe;nIXbE0s^=grKS+NC(QfGe8<@#(=umP-!9>9pV|+cGlVAN)DE_( zJxknU8apcue>QCuI$twajBC4z%2k=_TXx6<{F%H1>=N+J!_BBkb(H^PtCnH=A^zB^V!}f+K4v{^PjN(X%%h83Kx8126M9ldP1u; z4K&$aSCW3_i^`N{vqL8M*Eyj#mS*wFc3$WDDf4z<<{yK1!jMdL4v|888*jv#8%|8M!#ga7p};mhCT zc2vr_^RhwM=66q>JtcoV-+L%52pN#nM_XF0(5f+7=d!#N`J(X(`%E>5}KcF43V-dFHv+53A88jAaPC7$ryj9Jh)HpWZTlEG9fnj(M!2_w55&9HvL3vpAt z%Z`|Sj;}8Cb7bbeVM-mgd88UCBN(6H4UeX-F+mnjZO3dHa>BeT>3mU{4U!%5@sEv5 z6&+oM(f%rZ6srlegd8Q45NZM*0L_9Y1C78rRS4E1gca&81OCTlhDx$SCit2490&W3 zdQQz$V=dX6nDBKJ)D!0bOFfxE*P$?<&R(Q@^p;2fv!f+=nf09UudRB{_)-Ei-g~Nf zq{fq7N!~XcEDVH`fti|_JdlARVFmQLY1>UL{8*V?oEY27yK` zUIp5GCaS;Zn^8t%#Dj)#230pdP1lb2xH4H^cF30u{QXye|KKOQ>eUj&SH0>3A99mi zk@4ZMsAsxZYcvl?++pA>7{blW_j9#nt}~vSXo(B{+si!RvqL`aFL3Gf9`Aj;Q@g0q zK&ut+F|sgN+QyEs2C1k_&B-~vSs9vJ4d9>r+F*OsZ!XNOEbK<0zxkqywjGk&s(k7=RUw8|cC-XBI{^Uk*IEJ)khd4058FTQ zz5ixfk74nJFetO-kf91OsOdPXxnt45(AebSHuJtAL92EKMu3|YZ^8;juOg}&-m$<`wu>raS;7-_~jUKy< z+tQiWs2?t8l0e^jZOU=7H0kXGbPSlAM#m>X6uaUJ%ls(WA=8iYmAS|Lm0fYukFxh0 zS+~QP>yFE>^PK|Ps%q9UBxk-;QT2|MF^h=q)&KNY7diN|Bc>lCJ4NjMF*1{(whhA| z2ltHP!I(Lz0NprIxngZ!l8c 
z_BaZ~?%`6Mdp&NI>q`_vvZE!4*|`vizt(c0fP6U@^3QwE`O55fHNi7BJs7$fyqfqg z%kWY47O?}67r9P$TAu%XnOiSA*IL;s4|r6?2zdPcxP_z6T9Hj@ji@qJAu=^=Q|hR(4BIlGg2RhULjk~ zFy^eo9gyHAUv9en-4*w*zwHSRzs*}WJ7oF|^3V5~C;5(Iw>Ry0tZU;*OBH}gl#M>J z-uxC+MGX|IOW`XPqMm0*OAzzV_u1^%-1!djmGgZz+k3w2(>45or%SI|dZSnwL_bL5 z%|p$R4jWYfE5GSo{f;ujMA;!9`)Iq2zdPSEegNuFBNC*vRp*RFWHj6rU3roiszVBU zwZ}}u6Eu#tZ~pKipLKS~^aEs$5y?%rYD+&?`zZw|M-rODnsq19oIjKC$q zgqq=js%AB!GpGb`|RA z=x_SEu^Wsf#h3lQG7azSkdJw^UBxd4$oc^=cDVM&I2yB<5y=FdO9S(?*V#C6Gas_a zi6?LT0Iw}`wr7WYq94H7p7jIh+%g7Vsn>&de2qzLo{8jpxM=@Uw+> zKyJwN1LU6V-43{PwvTir@kjUdGQI!b^?}d9c8IpmjNkU$%XU2JwCWXdl{&ojrUsFS z5^u~QKDG>rFFR!V9kS2&>0);ckDZrsqp7)W29NV9y1iqG{-ha5?|cV@;D~O{%&C;9 z;blil5VOyB5Pz+m?*aMp`Rdm>u$QkGCuM<9yHg0rV^}m*RGt7&DCtQ{qvb{TK3vvd%~iNQ<{iT^D7}_w0~Q z^aI5Cp7R4N8LlqZT$fh_|Cln>j{9cFrI%uchTWKLgA&-Rd(r*2GCx3e$n*nbpYO2) zuAJ`#KS&|$&E1`C3j$$As?l<_>srV8V8L>v#oe0WFZn4vU^?B9KUv0IB|BvL4KgQ9 zr?cI%t!B@(AW1x;FOHEFh#L|{8EZ$OqBqUDC7wdl(@kFDWY3P40A@~_0{99!*E1(g z1MtPE?w|Ia>4e$wF)%XJ<>NgzR(%egCzLcPzZwlR1SDjo&>?sQ`wL*+F(Y5KBB)mqynQF(`v=A@f>vnQ$JSbILi*1z<+sOJK_rX zu3}f;f3T&d?A-+<(QBW!&dUfNmlNb@G@i1#Hh-doOYh!bMp%Hw&HhxGA0j(s`XS!^ zq(4MveGVr}pM7HFB#gn3bCl>-)A$JR^r<8*7f1=iad*dmLz$l{VL&B9<=qU13xO$pUV!Jz-JB~#|LrgcAt*s((rSQazfYL>tWuRf3cIp z!Jwn*sm(?KDjE!xpJUyS&1z}Mgf*MPy%Q4otTI1DcF6QYWM1sMe~8SBosS5n{d~}_ zVwyIXw{)@$gnP{EPD*o3%KJgO!~J%lpCUKpOaC0+pYu6PW>|yjk(B?4Uzr$3#cJ>| zi5l~>Gk-J-FY{?fDXhv|>)9b68~UYNJwxO%X5$S^;}>F|I|*bri>AgoZ`jDJ$tjJ= ztLRO{N$}ZaDzVui6ZG6GT}yaJn~fHb#0^83erL;rQEySFg(98-6UMfTHr1dJ?aEx~ z+0hcd+$;T+q6(FJonID4n(fzwS}cvjwG>JT0F5j)`)p(JYuTxKWJ8Bq=OtR{v6Fva znchlv$jAJMFCllG?}yGcXl74K!1a<9C2GCnag7M?2T_Zhx#JyT(+>KuOeT{Z@})y} zbnl10;-jOhS}pm^EEtTBwb8k6;1z|+9|oP1oZEPJetlLM=-D9?^h_nN*%e*CsRF~k zUfKsfXE@~hIM+6IQR^L|#1p+ER80?NJVz?vC|em8P{w@SGhbJRRhAtxq3>-nojrH& z=8HJ_52B8amiCd9oWK3l~1c#9k!5uH5hg*&i`)yo*lxUVRgV7)JmG)0L4_m^SRdW;g% zWu8ypEdnR_%2HTL17CFiUqZXN2egXM5e z{U0dpuPB|Y78q;sN1a~BzxcW$__9MLe8axYT03srlhfnF>fr9lgR{fqJAG(Z`qe8G z4O*g8lc?NMY@bOraLi7W 
z?r8`tIJa@*VbkVVQu3O15dU78{3ttQg7}6$h|eO1Ke)eK_AMjM$+SVcXcX)Q0|c@G z-5dnF37yB4a(p!lMrqTPu-^EgMLx^ykO|{w^kF<|pYMZL^IsrF#X55|Hvd#fi>3NF zHumVUF%vh$j$XDM75My9ionYbnczLu2k+$Q@VVyv{P3XFEa|-}8}n|&7{^sLnX>i` zy?U}&t=d~-Ld1KnK^ChwPnjPo^OVUBnUMA-0~<--zO^*H#WbmjBD-u(8f#d48iI{J z(5dklPWCG=_>KVkQ9oZ~Ewe)=guOT9Y5m-=51-O$_&^RErwmWM%Bm1+pL&kD9ZCJI!%(TAiQtraOxvn=fSsY78S|iD*94 znL%uoa~H{CVPdtN%}le#_3_!?SY$D?Lng4htf)6&AHF^11ox`*zICme;gZt$5HFu6 zOpN78SeJ&lDCtwwADT^(*lqUhue`em)a;N6>g%>=P~+yme{yyh59ys1(TS%st(B=R zkQ(n)33N3lIL4NDwKQKpAdx#j9cNLIGD+LOyRpHG8K(>IC+-wMmmPA?IvyU^r_H_Q_`EtgxZjU*O(LHD zfRul(z8(rB>dAzGs^)Ut|V!=N-%ufaJruTtwjK4hfDxJc?ZCwb?*kHbsoR!gf(pyn2Y993LGXHwPAP|BXluj?k|ro+aFKt{~np*IZArF3f_Bc(~}v*JG-w=3r~h95#LH z#h}}qZ~Q$)HfMIoglyNja@N&?_Z^%g!=0n3O<-4$@*7S~7^4w}uFrI|9xoe?1Viv+ z>HfnziU7ueX3kF^9P7k2>w~lB)_vb0zm<@rlX;@WRs-t1jA1S6q|$*5m4{mI zjICN={K=<^V9XAgFz&eE?=@$tqpJspM_Nf}BR9zrNkh$_6C`41js5Oyqu3&6Gwm_K zgB>h*DXQmpEBGaa7*V+)6S_~l`DW;<`a2(gO}*x7$_iw%8NHQZo5%6xMdzF%&1y}B zXi~#fg5jDSt$$N?kg}eD+4NE^#4_o1cEkknqD^t-X4pXNiWAw-35Seg_FB($y}~?K zC!HBHYfWcNE8>ugR-N0~(=h5xm(U6W)u-;oW%mJ}!*S)!GzO=PHrhnE&G5SYZLg zd^DMSq?ft0uIrh8-dl>`%nq5ou8;A|s3*uvCJVybtX4opziy3}MB)>6C+mqvffigx zVS4qK6v3MvGU2_bChl6-ya&e=VF#+=g0>4uv}Gsm>IBw_TdWLKE!!pP!>qeBKl!~y zpk{|mP7P(5D9!=L;CCF_w=)za9A!zeo9%h3Rbc1+Qy>R4z$ROV~V4w=2A zdlGhCesl&EV!g!iVoHTbC6XMEdWZg-!&qWuKX>i_pZqV2Y}@RR*~^)ix;uO4LoI%~ zBqV7U^f@|HQGO+ZiGc}5Q?$C#&LXXS>Q)h`*&!3u{V7Ct5>Gdo02`(^Il6}0q{9|a zOovvsJ6BY0RhOxh2in`8R0M5y$b|OOwrTF+S$%qV@9?-fKF=ycB2taKOfZe+3uUXJ z1Q#*3R`KBuH=HRRv}m#$lbN^vks{!-Lnd&q-GX~?@7{}j#|)JmOomZ2ArYr2pz+jD zkHhvfwty6;PrOF+-AEt*PX|T7WQTkdOmlqJ+*=*F#deVL*K`nlMqVOPS3+O-jOK5= zmKW2OgKs+y0xyA-GOv*AXbHzBJzSKBII^A|GxhGMM=7ka+YFO!#kXcYXGuC7Gp?8# zkql;fd`jO`sQZ)~ayPJ5b?45Wu0?QcYqGp!L=y)h*9KLZ`DB52i#+6vRL>tP+XS%x z{cjf8dD$Tou-9K-v3(u2U<6s8rXJ2Gttl5>HA+k*m)HClx_KvV;tJe>>ed^Jpvn%J zP;DRh7yD4Z8uzmAoX}C?gGY46RQh;69Lv zXhQaJ*VpULsuz_@N>XVsYor1?aXQGf{5=t|m3dur0K=66emr$=|NPgKLX{gdq1tB7 
z7wdKBv|1nPwH&NYPQ3d%+TrdVsM^dT3ick^sKJ9^kKlc~hYe*>d#4|3AkC0w?8(zl zFR~}ILne&dfvgLRcMq|FuL0e8_UmKO_?$^DDtV+dveHOeuNNlD$03l1OtHpL0J2C0 zDmPlfw;jmZhcD|+n9@X8&axoj6-HN#_vM*(rR-lDo?eG!Bh))f@Y(y7@zE4EHinQ9rC3E z?%`?pKt~j4@%LMbD8yNfq0EVY;Cz`~z;)9KSfYss;Q#w7&&~V)nfr%tx^sAb_rdB- zs7kj^8*HUpXZ7K;hv!dq42rA&;D7bjW?yHAO`x};K>PRgwWG^Lw0noIYcpVa0XmHc zVR*!24J%Jyf@g8b8o8<@f16^}&y*1b$_|;ZZ^y3oW50jeM7gD$kL zYoRf|aAALdivC$UF}7bw-RcxXU%KPoqnEyq#*( zfAQ@_jxN>NypRd*c8qfGQpc}wV!iM6-cSt+lXopw2|nS2cBY-s&W673>5PdsL%Dj1 zpuX--umAczMIdK~Opx=l?3`Xbdn9ir^fc3VfOh z!xPyd6X5(b`|^#v^D~|#;g*YxtteBv5#Qxv$MMbuCcF45g`KidI9`VDc^NTyruv&IG!);LRr>W^k2)mgTz zOlu56VsK&0#|J(6jYW`WhfK)d`IuY1Tl5&gsZMb`L)_xdD(>o?zkv}4To#511Gh92 zfND)Cde|r?@Oeaf;Av?nV$c=*etVfBbau$}8@%hK{02AYq~8DU-bJ@tQw5y1#GIA^0?;lhkJ)D&^4Gt zK3+nOL$L4w?MPhrzrM(4kR381----B&e?Z4SKYn;L#jrt8v%%b(|qBH?OxM}p=CtU zNxf7*T1`C8fPYq|;O!}&7czn0Y7ssTc+>YM=O@R9^+A1de0F|X9Uh;&pF?C~r3W41tZ7`dT z=he8KkcTlH7k(-HaJ!}dk7XwOvqL8ITei(!^be^1^nT^2eI6=Kw_rQ!gqRCh10ADO z#MZsk3Ca_j?}`!M|GFY;ogFfvy@C4>!YO;2&STt~2|^g_KDG$T!%C+!Z||{#U1g1G zcoQam9Y5^3GUSx(kO|m!j_P6=?aunyJ>&{JR$Djru#> ziQ{bggg;$m$+ANxWH-D|W;#tV^Zgk+!>^>m{G z{6SR_T*R#MLMB|>5uA$^d%o!u<1RJRcLSVUJx7n(`Qj+W4D1#Gt4aP!Q4Wz;pcDoc-`>z=Y15oqF*{^} zc%2Toxj4F`H7EDvb39>DF{w~ZMydiUL+&Uiqwx&;7G!l}Q}3HS`?KF!1Y>r{oUd|! 
z7TTgU)~19>yrKglfG=03X&iv#m>J?pF;g0#y%ekX?6XA>W`|4&uQzort>R@T~CmTQXfx_mlKEks6HgfcmGtx(L+lkO}HmN2+;&qkU*d^}c53;r%pf zTO~_9BnrhNmp=tx#4vWoQaPqMrWCuH)yI{2yJv??h(2bUiN0{ZF+0+!p6tv+kv@W6 zTdPOdbR(bL!d#w#7(#2FN|J7q`^Ia_^xd+fB`n)HnC23eTo&qjG?_OchRI-_2jzgo zNozpZAc=b-tE1;U(+|!_EY)`p>gW1e zDkeylV^{oS%6+sZP@~3D8_DzPP+?q89VwJChaDN?hoG=yVO?vzm~kq zV}y<|@;hA#^|Q&!L|3&o!Z9*yGoJ8}R;?&OiqDRg5M3l|mpAO@9(-jygD+ZK*AaI4 z+QXyhvA@pB$?FZQg@ed3sOzzUzeIpjGV>i>E~gQqY119Pf2<5aJUe7s-p}8?VK+R6 z1pcu;RKj5%9o1m1k*m0Gwknw6;W1^N)Cdpd@E8T?o|S*7497A%WWxOgnYhm$(BWlK z;e<+8QQrwgb(x`}>W9pI=_yT~0~L=X^C(1at8i3medb*){B9Y7cy`DH{39=bJ8lX3 z?LL^+v`(jP(fNeh7Uz1MD5nu#f-I>eJdI3fxN($qFnRyKr;q6$1;Mjh*%1?%kG}@y z)_dRa#czM+9Xm_Ilr+$4^#3WmHCk_Iz#$z*S4+=gww5H#X7s7LuJebN$%nE-CQujM ziWkxfWo3GVhSwF{b6yg5-fA+OFUj=M&|K@Tjh0Je`q*--NPeFF+$X$BHgg%-5fiXW zm*W;#cr5gNQA{MHGizsu*9NwPWNu|JpJ>rsR;yh|9j!L+7*D6a^n*oUWrs|#wld(& z#d=kzr`3xGY$=fdP=d}6o@u(nj13HFA(aP8$fTH98nZ54w4= zpD6+~J7NNQ(I313dS4r@iYelq=?zS?q3My?)O()Lo55Q{?7S5XO@~;v5jh4g!E!5+ z-egBhm@Z}-Z@`rG-M#$zi(6r$OB-~%Znre*Pao9h5A=EaKE#E6s0q59%&<|%alp)! 
z{mqF~6OG?NJD}&sd#kHqd~+E^!t9V~Eia00H?8G!&FNWqO!ZLmIGlSZJ2Yn@uZ)|a zNf?S+R3>$`M}u`U!NJ`yaXzFBAu~Hz@V5>Ll=4C#!0 zAz?*%rTs@h8plG#6j_$K@>&jE;@HfNmX__NBW1H}8(6Q9)w}6LxdO93)10|~cjqb^ z)Sy+MeBY21l0^rtYwD4&2{qvO)krqSK0)bAiB-&wmR9j%aPrZQFRfyNm$jNkFlzjD zI-q}UzuR%H1ir+F0FtuOzw zx4q-t@7w(rG_c;vx0#q=-bt}}e$!9khHaagHPRK2GkDd}FTbqJ=xcV!1oQK*Umw?C zZeF}^y0=@Xppwy`QZGzC4r_(Q@ZZsH8yNqbOlTLFXbY>!AhJFEwkKT9&8K-SqWAw- z?uweQUUb`bZq}@WNM=*(+~{}*T~Fm-@r5jb@d|p{nmGneqRrSX>pv({Ce99-px(+@ zR-;p!Q>rgxwMEsAX)rl|Sb}N{%g~Ljdj_eC^L2MV87}G0pzkiz@yHIDux>pidcM>R zQ)TB-G>-i_L`)=Zi|FRgoDGQdoAYdjbx;^(O6%1S5vRmalpQSryy058_Kj`;W^G>< zzUHCOQ%ojLwI2CskfEOQ1EaTPf`R_KZ?+k!LmXn zeS|$t;g@(!Lr`uSL|p{5$!gLul2+eZ1aWrAg!p16egFD0aj1@ZEkwhNg5)GBE}J`@ zyIKV=c)MY3z*&=>DltHOrC8#>@(E=C=Z1Uj^~RM^@p|asT-H*(8b5otJ6BXNOF1bKX7NfM2|| zC$lgv&QYFVtJ}{^xkQl!UFdd?2uR=wvDYK@rUmezHdQJlB{nUzrtNtdsd`h~0c zTtfqWWa2fTr-s5VvA~34L^RzfWxlmcMv)!zC8EyAD-fL)CM}q^wY*K5;jn4d!X;Eo zqlq;RD12mxOsFsP$97+}m)Ck6l-)hfjEHB^F9<$OJj(t#=9XzDu{AV7#~V14(SFF>0?OyrJ88+lOis zXY(OaQP&oKrc8dJCzuy90lts~&06FASMLCcQFq)B+!~*QOoi{7F+o%<5UZ+sy2SBF zUd!KqRR;E@O2B4^Oqg$=bzD2Bk00FYd+#YM`gXbX`mvWS{S6J*ponQ{QGFeDY4fyE zMJ{poBF5xJdyGxo;mbOnW9WO)PH7j?F`g1;;;Lmxni_ssd0ty~_;3!Jq=0d+k@Ay=$XKab3|=~!j&``9O3;nWey4VeJvq{?Z9 z_Z`Xsh7CYH^}!l*S@AqvH_K|hin%gY72yF-?ylp_e@Yo@V0OraHYb-%X!pPQsD=y~ zP%c(0%F)QG-aZ3O5$0(W$CRjulat<>cIBwkGG}sj$b>hi0kpxp=l&VY7SSb8!5Ayn z;L-8xm{mxGhI*+84X7zK^vFAniVrLE>1KyaSaY898?5`U9tNtK?Tn|*4kDMkWxa?T zQjRARl2R)M$9!eF(a*nJ#s)AuWP*DGHRjsYbN|IR*~W3c?3m(x`I8EBZdlsl^=@vA zYQ@aG!+~Wv45Em~fWi!Um7$)%6ytTy7Lb!knHx4cT3W`OG_k{%b;Hj18V;qwi!ldv ze%wpf5APY|pXm~zOJT;?n&)6V1wO0Hk(3?s@d0PtuxlfZVWmdfDxBiO|L=V`BTM2C z1LYa6AMEi|eZ2{~b9nk(kz4lBuscI0&^fnkTIGGW?1G%M+Z9nArSsBS;CHI`~2!OdcIehTRa75H~vH9VP&j!s&*`T~mnGmK`#o?9GujDEG74hzn7f z^JJ|^mwCv9Oy+2=IBql{Mk^QQp`6B1YkK-iiX1@M5fjYba`6Uc&Im0DTvNd`+9X9h zSBzFRm@z7eG48c81#Arsh3BhlBJu?g(aq3>(g38do9xijnuq_<>V zbf0~my3d2<+z6u58JUo^^R+~2N#`gpT0+-*lU$(7N_)-H@KQ0qfaZiNtoceeWa$uM 
z09s=Md9e|{tYYUr{S8luc2j1_j`$d#sI|OxQ1!C9Mz~u{pz1+ACM1R2#2spF6XQ^r zEtM4oW6}6q?AmX6a}lK3ArsQxV~nAPp3t&&T~U1tl0-th&SwEt~FC8urz9Aw!xm}I^a+WBX^8NZy0^aD!h?8{`5cp zBsIJw?QQ1uK0;=)G`rA;Rc0Z|GhebjwypWnyRgL^Q}Ooe6+;K!A=R>a06`YauR??o#E!>2?;EawQAAU=}h~P}Oal*2h2?s`+QlYAvUd>b$f3hYK9cf`( z%{zwLA9+iW>mxg4g1QwgzX|Hy>g;ac_ZI@l&Dxv>8mYb&q{$AMfNqQVHvwhmdjH`0lhgh}%WTCFjm(bv!x~k#4lzj86@o1`F8TwR z-Nnp+Xk_!>Sca*T9WrCP9^R|&G=1B3#r)7%FJ%*%42=`Q$Sk&_JyFFdq`#5h5N8W~ zANJdde2&>66TZH~HOI7r1V&hEqx(|y3NPX*lWC6HC`ej}&fMA`lW>hk=ua0xm>n@8 z+{z2zuutQ@-z^ci;8>-U>pvKh9$-YDbtdM-J|-j9Rxz!>uyr;00q_4O6}H|;1ZGD} z*!m8e4qMg*`SR!EYzqmy)^H=|`4~5)KK>Ywd1P=`L#<_Z5tPUnd31ra*4gO7n$}5t znr|)hxXccj7I#Z+?g#zw-^Sh9pB;>pgs2PB_~JZfwo|iWKA&m}V-K#VER5r8T`s!v z=btZQuACh*VeU(WHXAzro1XHXlh-Y>WG+hx-ES^T)>M&eWR(o+0*C#xbKe9)HiKUSAZ%8JMe}YN=@(u$BaXbHmVsw zxQ@z6%Iu&C;OigOIUfQ%(+%!_ay8b=nZpfYiaj;u59$&%52vqhwm$r!$kUp* zDD&jXj+Ovzop-JQ%DPaTGNUzy?s8zZz|&EJKwNRKr266Q&%Q*3o{D|f=Du4-dnh~P zu06S3vRenMs^2XJ7j{l zC9vMGfbHqYy@Puv7z=&p3abHaBiY-kHBJDPHdzdcC^Z}WVxmW1sgVr@$X|F@5s=v- z6Ub{XkV|WL@4@+l>WIux|G+p#wSDQzXI74(U?zl=LmM;WZ@KY2YuD6b}_EC#prfu-Vc`sBq$86BBfm|i4N=KbExz9 zYn7Q}F+b+}i@?kdnXzNfn)g58aB^7n?pWTk#1)%b9>^#P(aQEFA?8gx9<`B_ zNB7!#&#VZ@?2rlMw!3@7T0S^Fe6Bh@tj-&LAnE$(`#|T6I?+)%D{BbMhU%^Bm8zZI zAN65`zX@5E&}WP_{nS5N1a5Z71b53)xB+g5^|)>h&hHZEy?b)B?gJ|$r_9@^5U4E& zj}a?(>{8JM)baBoBLW-z`VMTF4ncOv1a>QidL3;4Gm~F##Zb`oh&N?D4Yw#7;BBGX z%!n?UIgQ$-46X!YDmz+&wPiEzfR&XHy!>C7*Iv@wZ}4(5z9Tbm@_tP)v^vvIvpOmT z3!>3%zs=%JSYT5m33bVN#R62;5H`DAQR^R+*{azg)0*ait?wR=`o2kH?Hcdqp@+0H z@uf6sO~F$MqMxmFw+id58BS+>lW%*?6K>uO0WLda0=k_5-?e$W*W}fI*#|jjGi_5N zZt|?7GtFsAQsny`$!fj0B)PMRd_Exm`~S2EeUW11eD}gG{9G9JRPX zH?t`zC_YTad6COwe#wC4vYYz&^lvNzH#=m4yCwczT}0ik%inEgHYnW-DyJuN=cg=T zpsz_QiaVS9H;ASZ`{}sL2EXmIi{Q-;negsCj=LS5^1@x|;INH3t?5LET|+^4NOowV zNt2d;JIy4IGzsIzy64ZYir~x+`B>XHFENxBg?52C(uKlLHCu4HH6Fbp|FtCIpu0I@ ze!Fq(`Qb7n!r37c+&Ay6AY9(YkC!vyI?g=b_%d-Zu)yZ}fu7Bl)sSfu%Z(um>fvUb z^gSP1WRgO%2qX7zqe!QFH=_gJ6Hg{)s#Dwai 
zsFhH;GpteH^*Bd{U|%;igJbe@8or-O%9W%&W?1G*UVQ00RqfY3^J19-b9TstYHOUi zL9_X~b`m2Tiou_4XUGp!IxNzh1310jpc#^VVYR$~Oo~$?HP4Qguv~PuHY>Eb-*bj; zUjF&`1x?^GwoSUWcOS08&}*!7&#bi`BL(XQ@Ny+l6z4vYz;dBtH}8xi|L`Y1;o7Cq zjNFiE?Y34Ycd7>**s9}$`==*i# zGJ7#QTEernI(ZaN)@ituM(bHb>b0g7M!_8WPxU6~>PtsTNCIgnceLKyWhgq?As_wN zt^>StzwSE~l`Tm1P3B}tpqeqllt5DkFdDQX6vq0BOco`Ycr^UZKU8ElW{1oH*Kb?e zN~bDv&*SmB9g|h9^xCKa&_rI2>or-`78!roy@>wj<09auxgis{?NM@V<(}mc*c|na z$4sgL^8lvnoMB*#}+I{T);`?{r_8eEm*+Yr(a&E2bC2#f+m!o@-UUU zhO)aBb{;DsBRL>lMkZ+xJk4Q?hhys9uEeU+1D942X_SbRxZ<*-B~)A5?aipN4wskz zWw-70??QR+@c2+wII7?(rkicscjV{?&&OVJW}Q~itHEy|%B%L|;bWFI1_u-wH^Ob7 zS!Q4$J7ng{c31tLla<}jO79|s88||=tiEs=mguTvL){^hg=3X{V%O#W*T3ZnH}K}3 z&kp%`FniC~G3Nj}d-G*wE_xO6-28Z0dQ!l3r;K>?c-Ly|#0By-56ZyI4VhqWc^f-+ zaJQNn$1PSIVQTLpvUKKLP#wJ(=5uO%Ed`QFHA?d4J`JcpvM2&IJ7j{oeY0*%y>}k0 zs0oNR9^MMU`ur%yivhU;>IA9|Eg@QofEkk$k2?2RaJu%?=p$S`3+^-`q?29tdDzm3qFLkqhcQ)b62fLUslDfLc`Nm z^I|aJ+o=9w*G3+sZ*fs=p#u(^QnX~Jd{qm0#0hS#y0sGiT7wg&M-tt07Czw#+q+$+ChO$^17+O=q z;4v~X2)JB@jc3O-eP&r^4m~?$g0w9!q_yhLy>Nk5Ut_pw$k~`)p~G&)83$WU$yu|S zjH70X52{4&l^rc%*_IbBW63%wUj7-^mQ-;CTX#$CJ23`k5e-)$i4Zz-bg+x#d9Qdk zQsr8&5!M&}eA-=6Z}?45xN@7MBRgbTx$TSRI=YRKPv5~oUlCnmrh1}jscz!G2HYb2 z(ZV-|T42TkQ>}Zh{&bnC@$8WM@ofopEf&n8=1Ohk z(20NhjlZtYYc4xz@3*(fqWTX>+;rpbM#E*hWom3WZ$eQnh`>g5g*$H~ex({M3!zYz`#i*)t$HHp{ZV%4=& z|LAu<;UaEM%j}Q|<(9Z}1?BpnQ)1}F)~PPofEa=*lsEwkhYGP)RUzh@W++)F`b(_f zpZ<+yu;qqK*tR6|E7+P-4e?H_3X}f+$?18uXBG7c2c#8NXmumg_olOeTun(Y6JnsJ zvKU0?YSv}R-}QM#kYa zN8%g&6JJ#XX?DnjbXyy_wvvx`lwv(1ClfU1q3M;tk8Nt`JXF7FJ$lL2LsG*An*-{< z`lT{Zb3-Pm+amNesQnM2nQ>ljy^4GBbR~{jkcprO8>i}A;2HB%q^cCEuq>L zp>IZ&^+CS;pG0rHo|WA?xE~*6w|3osZ$C-5fBHz*hiB}>d%w{!x4G6V-DHSeAA^(# zk_dB)%51Np*y(&8S_+*|(KFvyhDMehGA;32a=^!raqxKGEPBFhIhY%~CnQG_8?%Z? zwj{{e#8yQuR~=)t?y4|<^^X_%X0t;k=r>)Q{a-5w4XzoT3?LGqIp0eC&q+a{21{^? 
zfIdRKFIBe4j+XG;ba8HW@GFBEv1L47I*l%Up^vve7egt~)aR(Erl`wZZV{40i^+=7 zO+{Ra&5_Z7u@sFTuERQ0xxPgd|hXavm&yE z?|7!j%4Ua5m_K@Z=$@UNpBx|dvM5G;0jH4#;Hf%f*lf`@oVLLU)Pm6XbLxapR^a)C zw-v#Y9n$;%>k_8zV{+5%I;igB^}JwSd7m_}q1FXg2<2Xzl%FoNGPA418|ju4#0W?9lL<7>jgWQ7e(`>hEtnlLA=^Hh9!2(I z(@RYvO1z}n<-x>J6CX8LwqCpnCrVLF%NveRqjuxK__c+Udvil3Xxlc+RcQ5kzXFmm zKZ2f9%{|SB7>V;3qy;z}5v!OW72`&G4UfBp`_AtuvT)fU6T0mz#v|xX=pG#&#~|mr zIjaq>_7xFzjwl$IUL2by$9Ta0cJCmqER;qKn^@PS1rD6I|I@!ws2!0VG$Gw`!LK5{ zb98U#QOTd7LT%*UMS7z$BSofStwU>c2)UDsG1lXBTos~DXGcqbwgLy&0Og&R*f_>B zrz{g&a_>I3aJa1u)5xg!V*3BXC4OL>mv1RkyVr)x3)#OpE!nMuJhnFc2QN4V1l{fy8k+>dw*Dg&D#;`!uq51fDapvXsPjw3H*!nxzp3!MI^@5?hQ zJ(?`ke>-XD1~bP*A7-w{`gG=9WLZZGI%EOvZ+u%3aM>XfxNSCheVg__(N#k=GP#wm zXR{d+yekk3(5I*fL;c2L&TId&P@<9@ZU5fPf2n5|oPX6y+&k`v*qWCfuzPl?aYI@@ z?RYd9xea`QjnRH`WH^shs-k&qd`M;;yY!cSw8$>a4w;s7M*?&A!K(jsS-9ZGH0qb; z^Hx-TMg|Z^!~nWb+hqEi%c!Xm^3K*%*rDK)Xpa9h-iS;%hO{@Aw{y4$mZ?t!>@4;`JH^(LV)MK~BN z@BwjDTT@a-zk-BC@sLUB+%y>dW)F+lo?raCifqs9kO|<9B=qRy&LInh{-T4DgSVb> zelI*0OuSZkooE=cOT)3Cu5YemI`sbk`siTaUabl(V$?b^0#6^Z|F;~$tNZli-u=EC zaAY=vAqtiC=(7x3<2Ui3<*d~hdP7%To8CYikH!1eA1tzX*&);7?Kng4SB9pZYYwVA zcc_QN-}d304b3><4I-$@X3$^ASHBlNk-}-bBoY^50&Mjr@>H^+x zPV46W`DwqcRCLWuH!DO;jZv|V)em%QOu`@+XU4>(0Woj+!1sYNl&$QL{U71`A5?UG z_MD;wwa`14CciDec zJz#2WP75_wqBVN6sEG(xuEFI*8NS5X`GLEy z`M?KnY4LPSJbnxjfoT3K&dHd18!p@BdOami#L~?pzC-x)-&6!)cE~*l?_3m@`VgX| znK7X!skSn~G@vG>S}|3CS8_*nUDKPIR8==}`cv;If-pN|Lb#Rbxq2ti)~<=YN?i`_ zoL1|@Jq0F0gs2nqmRbCmroX>sBN7rl22z>l3_Knx5mgF;QftMa0j*G0nfjUQ&3x@2|8+&cWrs}QwxdK>)~>%kq417_ zOnn#{7dS0mf)T+sH_f)DTo+ZQxYx@NezKz_L|akx>xi<-gD-!w&l?`LUT;7cE4Nnz zl@cF;>~`Xz$A;J4u%1pBkCEadeJZD+cF#m`GkW=_4vXx|?2u^{w;HIsG4AQGELGz* zmNhM1cG^WF<(2`UQFBo>98Ox@Of1p3=fAYfd}(&b1aixc*adR`z=tB}n6kYbfP+Tk zh3nlOv!(b-$l0cb4bv)ObSQt%Cly)B?2rlNbl-NZAFJmQ>gUwUh=EWL zCC}C{j02Ez$kL(fHMl_QG2!p~#XvJt;dY4){8MEfhS?z#$Spf|*9Ok4wP{#$u%8oV zW`jtvw`vLUg)pBi-Js7n95Ca-A5eenZ2y{GUcw#gnH@44YF^DvqG}9%wqo-h69=4{ 
zCipyhh*d3#W|W79XAYs8srco06oHx@GC{q$Y;&pVol|{lShVuonAWpk6Hc{hW<j*yodx*g0e#<#M`l%U58R$U2g7Kg0e{Ts-oNn1B6~PHWWu>OPugFr z!-b-{p)*W)(CO;wWi?5~PYC8Pq6L4lF& z#Ado;O)G7GtQ%UH6XbPNS#=RhdHXNDrO=t19W=q*d*<$`->LE}#-#h`rr^c$7MW>v zM4nHuz zmDy>0lDB{OqYJHLcF+X!3-+b;XV~m#&B5bk_l(XlGhHD@ii|+Z?p>=?j2T5!j5oB8 za;l8kokquH+RE7>6Y!fWdzaFqtil&LGX3#ZN{?v8`&$3N0*A>e311C_MKz-Qy6AeZ zKlFwo2U2#(1oX8JSNL9aeAp8C-^mw4c$+uIz^FEny&#NA`-jYcs@`BlQalI({^bbI zp~RqfcC>_J+b4L4BdZQHRybf2r!HbVaab`M#q#sij3QU9?c={poo>jd(+_$=V4F8) zcE|*5+b6gHw(^8+&JONAI6pW)JC4Tc3p@J+PEbW4^RshR*b$Ng9ciy>Rz_6dePh*t zfCORqI7wesW-vHAWJ36fk8Z(R2Zv`T_p08`IQ{x5O1>Dy)ip%!nW-iU##BINnITsy?C(6T>H9MXd1NZXCY*QvY~U<@PuJL zPOq2Z``lPp?Epq6*DDiu=gJ{q09o1v-?2m{8Z}k9U*l zLe9p`(w&b$Splp55g*;(JuiY$M>8*E!no}TTwpvu*c>K1&l3G_3~U{Jj9TNRCYuLT zQS0G+8G{K;m};p=ZiHw4*^d{2l^rs{y5TH&s1mXJj8s#t@VPY`{a^1$AsWtU{kCPLN7k>a6XsvsNa=*lY55J;;}LQV1!nSvChBNP4p%kO9+~2H-VxV7 zP{uYpJLI07+9BO@g&UusK1vgh=%=)q>^U|Kxyf<8BuK1jjO#_~uggn5xlBJQJ7j`4 zyp~g5+NzItnNL6*dY0>+a#oCS;!!f_=o#Pl2cPg<-8_P_LneURLFAlU zZ$(~4W1_%N|;-ShWXozb$_ZlZZqnEm#blKOBmnqn1hfH`rV!J(?^V7rnZ0B{i zz+)bpiq{9M)`WsqJ~lFO+@NPG0=zR#L&Tc73POqNE<0L+agl9pU}W8MbwhQ_WWT)1 z{G161fCwMSP}DRV0->J%!)e{U++JVCel0s>!gcYgd5CM@6szQ8I91SDRJ=-+5R85@ zwQxG%1VyRGBow|PMmz>8DxAwbeM=Lh%vllT8O`pn*R|AloQ z<#4PGq>!+n4aWvYtBdV3*(OylMIsm5kg8~}a)G6a=y!}~s3l*KSgC>X-!Wtsx{B!U>D%G-URsV=rMOlLdj&bv^AGW$!E_B9k35A%EvfTl0hNb_y5tsBiZj zYKAYJT*}(jTuWOGktHPA=VoOjQ_oO`ZKmYd4e>qmX|FG`@3TXu-{8%U`5SD~jsDAu zIQ)Xx0(V3bW$UqN0Gc#Q=~IJIMH6H+(AO}n8JE@nRYvAIJLD6=?q6-zS~T)aNY|Xh zcr!_=WLz`$Q+nX2@{6&KfLB?-{xk0^vf9}p6YOo*G*S<@^1ieE(oTP2(|@~Z;$x=8 zHVEbDnYOrcGTS4$GjU#OuhH3&F8Kr|p^^3U4;8_k9Wmj4+hgwXb#-2yHRosD)wl1A zkFJg{8rfl?3xd_WB<#a=L9chh!-H|(f%09!=%oBTc6ax71d69+B4w-=8e$0)3?{K|7Y7Wk6#-UE!yZ^x7?c4t_56o__ z2UzW4vl<;GlU|D7tF( z=2M1YGis&Ga8%Z3p-d#@$JDID!7zS;KVQbyJv(HvpP+vi_zIx)ej}_kAvr}KEE)@# zj4X6U+%Q(?*BY?#6a0Pi;@=jZp&-hAs;b^q{9NdL(BZ}RosI&JQsoLwCW z*uf{y4x4_4_dMZF$j^b1|D^xM?8vA!_>Xuvn4Kf-7RG^<>^Piv2*$+^@)KoB^4THN 
zkMWo<-keG=F)nw4F<6OUO?yQdBc8jH;h3vP_rJ})ypksUIjzFu|PzVlln zqP^Uml|l`>RVzCqjn`-xIrP_5L5+x-`gJMN4I&T`i~8MVGW_h23Eu0j@9;iFe=OWM znhZ%Yk8SCLF4ad-9~sLUo0M+MXA;}6JuFeq&W@JAY;{O3^7c^I&QT?P`D-0*`PEm! z)hEa8gHFt!0_y4E3w?Yf9&EVBs|NSq6NLDdD`*t_YUYOxt9tD*mBnF~g8$jKJ>e2= z%A?sKcYkiNgzM(~pnh=LFG7MTp5UUw;d9gnUH;-Lp)h7^k%BD>r}%ju>P$w zf-Bh}6V?mr-1Ze6OL^LC1ZDaKSG*2pc4&=ctP5{;M`63BFs+S-E=a1EGq4(IcQe$> zB3m^(WdBxWGM=$~8WBTUK#K*CpmDLOP2W;-(o!=?7%T&VNFBT%IVl3qkCzuR!MolY zys#?R+3uu)JtVCX;x4uPj@}UxuA96z`?(l-=I|#9MW-_NLw2+T>4Fz{Wk+t91v$6G z%b()2xcPv&M4s4;D{r=F^kepUOUzXz6>12{Nzd#{wpnwoXK0Z6)rzK*w{oxKDkkQVSSAv9E7PsG+7b!yGPk;es7Vr&JLMS zU%z_IPCmPbv(6k(RgJfInEe4AfY>IOh97qD(cdkHI}vJl)bL7Y1Ke{fN^b>C=p~wOK`3|N)nu`B0+eC3{1ZG zYf$hh@1Yl2Gk3>u%7~+GmzBh*OND;o*FPb?L@&w>nebguywfh+;#(c2zwN6KtX7b5 za3BOhG)jLmB+N{N#QRH`j(Cf%9gm0Dkkc|5Kz7K4aQ0}1yG7W%5MH;<`hfT51EW5N z$L+~hi?7?SH??KiqYc-ZP%{PyrXuwvwIYVb%n6jUY$Ab4rUw}zAb)!q34!d83G#Lt z`;l+(*1_2WItkBVR`rf5dpV;~Fyn~fNHlgmXS;QV;HVvI5L+Xb0@}DcJ%9Ai7TLGi zArr*yT>UkO503Ldj#YP5j*X--q@26YX?r?888bBlv-Rej2o5eM-K_4nen}CK*&!3i zorh0Ot=t$giL99(p|06Tsh~!pSebm)Jq434EuUOY0`}?CKUxG{cEkj4YasLJ8g}oI zofpP{@R$C^s98>Hn!%LUy}76-5pAInRTj+Q`OE4w~CZZ?msS4P#~ zT3PZc$bfo~b$_T8%-M=MXX##Xc^~NLu<9V5q`EvG#_ht-W8BSJ{11hE;BrHz1>GtG z+_+0OCY<{v-555RH1uR_b#P2B6Zxi%sUw1TjE?JMU?`^V6Su~H@@Y@Fb2q6#cE|*D zu&0q@u3%sWCl4)6&cl(~faNl*L$?%jm-gQt4joZ5Emk1+?0fiY_tc4mhEPnkKl?2rlWwHIQ}4Wfpn{lOWY zkeNhv8Xu_#N4+!wU;^0dpL?~pe=OraDU-fthunoLHCw&^e|xV0z2;-R@a72HMf?ez z@fmVOH?1AUamd6=Uuc)b-TvOgLa*QKpb6czW^4b&NVThJHC#tyLmMQ_X(caWAd<+Q zCfIhJ6ZyBt=alYx)85j5|_@*{HYr?#CZB5b(uOLgfxyzf*{1iQ+UNW ze!gNFfQI3NyZ!FJU20S222JaDZR+MS)RTMn`ulV=z7ZFVwh^=-^Rym?9lX&6tXB8p z)R2+>0O_ZIcxzq=Vs_93aceLB@XEggF(y*F5u?rY&CX{cA=)*C2FxVy3KV*ycGDqY zk0tGu5s5vca4}6H0qW^R@80V)!Z+-O#K8GFdf(*fI z=~(5zYyhCe7AGZV8ONaW!asbz>7A$`|ww z&3))BDcJD(plh{Ubqwc^DJ0sG8#3X#n2&ylYu|h{#R!$|k+qqH_z&q6g1K@c8z0_BHeg{s%$r4+0z+;94PeTy{SI2<+ z!W;ssPORX+{GK8JvqL6;7w@CAg54Fc^X!;=rcaD?Gcs(4YkU;yRpx?~olLeb+i-E5 
z@UxPylqei#M@wKX-ba_gWSt-{f11%&Q$BrJm$^2pD*vma!tG(p{uUEq?qM697wU4n zbIgHM)y!;+*`T)d{_hI+zpqT;J~!+`^S?g0kMj*i|ACxi=Zi=FR<}O<-{*7pgUe2- ziDIQe$nKD-|Bol!wwtP9cF44CFH+4b+cv($oMTiiX+f7PG|C8zvUy^GYo>cK))A%) zqWbI6WcDping6rQBzJbmgz%!HZ8kZ)kB%fKL6{C!d z)^yX!NrVZh!PPS7Om?&c=c1`}6;9Sk6aVL%OPEtxFmVz#0LIZnAB3VyaHUxr4?~bG zmTO(6`I#Lu;hStLyN}|#T8KF~zssodE(P4aYrZyoOQ{U+37ZkX^+1W2AIoI0ny$jn zQBI9J(|v*8_U%Q!!0eC-@Yc#IX6G;MPgBSkUwz=5nYH0sZEWw?xoPTJ8Ab~mU4du@ z*;rGyi)q~nNPP6~DFQA#WCC|FPV*?ZGsZ6U-9C6=`O1bkO&E%}BrV^S6>pg};Y^Zj z{zi**(Uo9sPl_>|a)Ty-7nIDK0czPZD%g0d-mQ+F+r#Q2Hm6^2Bn%aSj~A)qa0?tV zU9z7=r@D0e^M1TgVIn(d!goy->W6O{iDwn0+}5&fxPJ5KLWBp1rKLHJCv0b-N2P2Rk1MIF!4;B z8sAzF2(Gz!mAM+Tqa{!mz5AO$WnGXj|5W^9sNqJC`&-#;=O=5x$gcfL#HJ_=dvJa;{M5?D1)u)1RpK zqD(D8my<6$Lf^0Zh9akHcE|+%qIYrQ@^*RozPw<9;!SQ`zha{Oqy`+4-74E|&;`;W z8+lf3yy@Z+`S^DgftejL!Mtd=?S{EOLx5yqv(1i427^w$oRyySUAJ5sYWK7jUH+8z2gswh%Y^@N^S8z#4!Va(4wAZw47oB{< zd*5GVH)n@Tz%QsbyMbpW57^;jS?;Dj5NgCrU+=VDwV@!PWY7!-LmuX_&IhG<4RvnN z1oxtD_uz3J2csD(MRhcx8jZrr|dJZ;C8ncF2V2 zLe)GFojf=v@zMuL34B2N8h?>|8xOtcL6O$f-5wFLJLK$W3CK44-U7)=PA5G4=;w1v za{}^_HPDYGWJZx7ZwyZpO~j=gcAchw^ua}zC_7|AwyjQGLbi$t_virnqI!h^)kkBz zeGw7`Ei!2}L!jZjG~@F=){F63&2*kRe1~O(ptD0JeA}s?EBMZOxoUIG*|4YJ+NTpg z<}dD1KVNd%WGU6Sw^v*d=9?jlul?&qmMuGEg0-yzUxsyfT%Ep{7Vd?^K1D~9Kjzn| z*wA8_t;f_XHPMJngoHBL37Dr`vo>ewu=KZ-U^@j+ly?@?2rl9w%~LL*R$)a69ty49dz~*nNV#Do|nF_W7YX+0oKj&`RU1FZ;yqHFMA&@6fQN^U@G!Wi;B%GoQA|;EtPcV zY0<}a{N+)RBP2Uy4@6Fw=Z8_uJvjDi>s0)9esQ$NQRfI=8S&s*nk!)w%x$;q(q?`CgX-aR@Pb{2dUn&PDQPy}YMzzgBFl4pWs}O3cYKL6 zNwg$Iam>_Gtl{r1128*e9=M*>qtP~}CNv#{U&_iWN6UJ_&wlMHpsqbMTW$tq-dzdz z%UCOBhwK5Drv^aGi==_x5uXt{EaQ=vwc6$!*8E-1n4`kj@U7z-{}S zm$qf@{EF`deU89tn*cij~ag z$MVOSoVgmKdeXZdI#rw~1_a8qz_OzyI5)h%H$9VA#^CC*kvm}QO5ZvJ7ikK8(!m=8ee;#bSm=tBSV@UG9lbHF>hSQi^tv0Rh&6#{1a^&a!*Eip#-p+P%(=u;msdW;#!z5T<3 z`-k_NFf{K2>{S$00i=2r3K@<=h}v_@&X9_`qV=pML_1$}rHh~Y)FR(#cE|*GOVr*X zyBTikKJ2_s!h=V5fixUO8WtMwc6xZMzzuwIEnw5c8@$;lcYkG>&O>&{tQ~ym3H;(t zl&fYb1t;WyClRe>MZ{LuD1RAV!i*@$G47~2VvoU 
z+?+pua{6os>!z79-IXyV>u9S`xE^uZGlbiGVe81r*wGV<(b3DKpO;bk;g z4L{{+?ij409H(063y<6(onYck=5jr$6i3ITj=GXXq0zGJkO|(WUX-t|!rM1tW_)KL zkMVdPN^pj}-x~G@bw!hcNR{h@A2Ugnu3~Yk%;}OHGGV)PLte#pFIWOcI_jqf^tkHg zz8YySCzPG7bajRZG2(frKs(jubdnPM)4Y|Z(q1xw+;Kwx+N%rQ9oaz>+^tyS)$i`s z!RhK=-#x()U`@56QOcgz236&!j2s$zF`-r!(>`niG;t#rf1%8%e0IovD|vQ)Qs3q4 za9AIl+&@3Ocla$$7`p5}L#3vZA=TJ0a`50-qhR4^D|VBQqwfIk+`2YhvF3MwMv+S~ zJ7fa;NjpBt{<_9gQz=w%@j*xCrmJPhrx_VUr_+olS4n|vq5EFu6wQv7Fm0#BAK9C) z(Bj6G%7e81R=Q(&4Xs*N9eOam>(oG@I~VOnr8T>Zg<`xMhdTTFLHh zeDCOf-zR8iVlN{*jvI>Pb&DUkjQ_!S^?p?9Xj=8sQ72FJd%w5HCzu^FALO1xReuSU zEZE5AA{Z-=WZjc6@bCf|cXJXdj;mESzI(sS^_U$pf!vC7-1tp)EBW}xs}r>Klp=9B zjDa84&S)ev3u<1CRi?5QiR!9*O1`m7lLjp(FJywf^|sG>Ls1|wH$Omh3^@u0pY+CK zu-uGDFzwOr;dGa{8*l&TUsmYi&kmXpZTXxe#qSf>j&Y^qJ3I635sTjGv@P9ZT4D}6J6Zy>Ws6(^la+2y778_{kh#*~vm*}5)`pB)nOgAe zSP%npgwn5wMCR|85lqVtnb3_N-tCW|tIwpOCr2+HpWHk2WFn1Q9fwPhbM%Fs??K}_ zI530Ra;-dL8cUIi-9w{|!8%^v#*r5xW7x^y|1Y2UgsYkKjdMdLyxW}g5xikV+IL6j zr-ddX4aLPEu8EnXh80r0ccYMs6>3J=Q$N1DKl-mreRsJ*6S!?s^C-B(v--3-Km3-x zvt~`JBM%CPO)n`nYLj-zR+sh-6KM=&az6&NH;mA~{gp*lFgs)dxqbgW0acq*qGttQ!W?aVNrk`>h{VXeG0QCXm}Y)FU9< z2glv4IkKAXu{}Ax*Dnyy2hWACE!B*wmh$8%H1Ad0=xr`Ib(!cL*Ib{v6X=`DXd`5Y zOi;HE{zsr5nNv9G`y{<5Tnjie2ti&N^-KkIRe3;p2zz#T3cD$ZI>zbxhHojdSF=MV zY+H)xBiJrZ);@r9ssXZ-(1?|wdJb2Ocj0_efA{<5r4>Va~+sRY}n!W7C-W?)Q`dnHzG?$9NUw zex-Qs+|fI279LVauvEphG{&{T?8%t8js}&%71kTEVW(yMQnEuPs5_FK9Bu|yY(1%F zh@Hf$8sfK0=5iry;gv5F;6cj%2!SZjHDxGm*&!3UEus19=h(f6cV?$#51MsL4c31K zvzYi8V#Ati^+F^Pi_T1$a~5pG64_~Xv;=9}_`U&BR*w4eA5h*-u-*i*dll}VQ64Oj zpj7fFYTtynJ?IHU&o%nzA@6Ut74be?9Aa~R&nG40nwqY|aeG+kfjj3S~CgK@-evLviP7lHbhCRx!QC z6^1&hx|FUl$YiK-%nor;&F9z}p(RTusbkUpmai_ds@WkE-0fWQPPn~!Ysy)9HP90u z#X=gP8>oJV%NBB&F_-~X=79SI&|5;lPSAT3TF(yFmRvm@ zM|C_gNkbVphmC~{Bd+DB8LcOqfrU4&i)`oYkO}3EcgdwgDlfHdqs6LQNBEbRG)you}QhmVTD%?_F1Zks$Fmg|#y2kLeqB)dnOh9STT&wn=cDp`?G(kJjp zVM9N4tIC@YbAylD^#^|HxCpT9kO^2v$aNyN)#>rAv*zC6CDWswNuN3orwW!h6VGO9 z9vWGytHy_Lk!If1v6)5ymr44vqa{dNQtl&gvJ$?Dp^P#4BjxcxzgZq>>c 
zK*LJf_#FMYGI@G-$b|2yhegS2__{MR_U8TS&fa3y)Pw@cswx&1JOW2p9g%nN{Ax%F z5;%6TnK0_Uy1(+iBAYWiWJ0>VHLf8gE7(Sr=kR!s!q>p)Nh`P07YKz!a|}e2^s=Tz zatx;Ea~M@0kGpR9uarrGvO^}6TeknBDEnVtD&=HzwHw(!b0v58n`&<8ukf!LD#(sj zsASVxIl08kD?8f0rQ2MFuMB?teAkzF>BfJoZ_=eNgTZecj-~^YF~Y$s?4y{Jiiu2f z&?;I~qT?p4%FrFML#Bn@wnT2+t(&J<{?{p?A>J0E$tpVr;@td3yz9{duR|#)gg&Ac z9e97+;t4l$dU)oBOn5(RyT;Gm+j$|HYT!4*=&y3An-k{zC;J{^lf^mehUacbxG1jsu;nBNY2L=r&4!BlGi5Hp?2rl5 z$6Q*LTjW;yH6b+GC;{p+N>xH65DVU>)8AfmX=FP>T*iRWvU?%@^*{N9i?rEi*&!34 zttL*lNUM_v$Ln5Z)MVmrCXg^vUgL^pk`gT=yhT}2ki*dvo*K~@cC?J|Dl>MH9rDI~ z*NH#G;O}m>SEtq7nW32ZZD(_bfayb;9rPWY3;HtyoY{3SW^e!42Nin1X9rEFwwff} zLLD8hg4kW1zUlr8hdW!pB-BSt80d*&mimr#URK%`Bsej=7g(j(>`HE3^|z35Uv|iS zuzEV$j9tcvq*3)*Xb@7f=thG0O^}Xf3^sPAS7fT=`2O`}=p)%7Z`^}c%c}J*)!dmt z17YULDN{62vYeWiS;YHoJeldeC@O{IXka>hO_8G_J7h1Wo^>LJs@uS$m8$Iw2!aUm zMk)VD_slssG`~zDp*t&{x?2QOcF2Tj%RG4aRr%||8PS05VBbxuVA@IwVZeoEM5>83 z-O50WkvmGvM8k)Ui+NNJx~|dBd|44_*&!3OtsO`Q?YMb9Y6y#Ac9R32Tj2in{=c(2yOkaGrhS;x1e=Y(Wj%E=`ad z7NDqX{-{I_n;k6yx+v;w*5v}wt+#!}d%o_yU-r&-ebvtES2KV+?ux`lNQW?y{r}oK z*C5%hs*c|zHwj5@z>ow%p|~JeL33x$Ieq#(62dbt?qw1r5JH4%&#PxTx2JpP?wMqA zrFaA%DY`5_3>Jc*_`xaxtrFByOG*nxd=x^drBxP|ilvkmmSTY)^ozf>&z>_q=gv93 z@4fp2rY;2`ujw`a-skMS*INJe|0D9v@BQcF~nY0bi?i&TH>&+RbHj6-UC)$qf4 z3%1(Jc4r&trlcr8_BM(kGZ532jYhC3nWD>8DaM8-#$KJo3Pv)NIto|ZbrPmnV(eDc z9NC{**gV)cj6-U$Romx$u+?*_k*<_);86-Au;&qmL8VAWAp$Cqlpa0q9JyPtuwA5o zRv+N@<_DG8aoEO`!5!D{D+X@QE5m8PXWhbtaqali0RmxM9L209m%1 zDAy>YM(8e9bPm>QwFk@#JOVw06|*oxZPbI!l5xFssm(!{^L<7dsG@o_S4(Q(89P2u z#t}8xN(8R9oaHlWaQ5NnMe8D99xkJV%2WU)1&%`;Twpv<{6V3XhgfBFe{{;q<{}ni zMji{8bpMQOi~y^a;6 z#vwJ*ih_0YV$O=qtzj}NR@zPrjt1O4DB586!6E2Ucn#xBXgV>s8t!svsUd186{0i0 zVg;pfM2+%LhQ9EA0=g9$4=L0BR8%la;%p~H;*#dFilhK6EQA#=Mu=Fpp3~pF(Na;* zIH(3!_4jK_DBzk;LFU{e%)J=)lip0|2 zRQs1z)Aff#$DlBxPRx&2wy}`&Xu6Bm8x#flicTD&Du%Us!~mdF`cmjcP;~92g2vG_ zq>6>Rzg8O3KD3qvqmiE$__2^7!THa59O-t`{$|P|mLnc9zb;c_DiL(9wep%7ht%jQ z)pbGF-T{`fIP+6$|LgGY;AONXq)j z4YRE+MGY}V6yK$`hgJ=YO6E&Q6sTe{VndZB@d6jYNJYo{<=?l0%s8Y*cH{o#xqZh7 
z3r=Eu?Z~+U*)t%(c-S(*2+lzO5SXLjrp1@t|EobM$gXAvQ#mGIClNb{sg#W4K!O0hf1YX)yXK`kfUE`Y%T;j@=yM}Q{ zy>eAgv;--CT-uYTbcc?*gPM9ubtGkrfQ^ISud zE5mIuoh)Sya*0Pp7Aq9D?{W3z`b%q(fzh189mBq07E!uf_1nJuOIB9RIAj~TjUqeS zOP8%3rV;?csE@9I@C}3|m^+Y1Fz?GS?h_X!W-8Rk#O;=9=cXYwx++syT)Xk+X1gMi z!=^)#4yAZT1|fW+5`=*nllMrJ;{PJfwnabo)I<42OIfsO$W~yZ{$RcBY!;DD%Bh@} znWSAf45GOLdDkH`W6}kKQ@U9U*~#Yk(dSut9*sk4V3i}?!iuRW9`uh!$!OL~M#*q@ zI%uWFPCp(%D(-{*?yKPS}by&K{Vnc@nA~ai0bPr=i#4ZDbprU}(03Ld*cGNR= zNHq13TPyT{1T0f<-V8#x+Cd9ZCtP&ER*6%{%>9ETk3t>7IwJypqNt zHLj!XWl-i@&GslJ$M}oL2?0qBVgrLm!W)u%8O1HQl8^&XFOePft9H&nSaDxVCMV`xCC9U$JF!&hTGpwrI+Sd@qe2(&xjW@W{ULuzP8 zZK7w3$q1|ZWQ)m;CkC5K?`R2%)%emUoPC&IGkuLPr=OGDF>HinNyuEUETme&KX{E5 zn8qPB%&Kp^u!0)qWRS173gt2ahn%WVs^L`9#&J3<8l)TEHbht`c~MCh%T`&%`){xU z&p4!pSG9K*;MFs~wg(mQY#|&%VL+LisXuYnm9DCbbS&(6+F6j6$pi)B?|$40MB|Ve zVnuDJK_vB)M|9jEHKWgY46)(cpdgR=S(tHeAt_>~q1KF=eg=CUtTMJE0Ka^@6@bPe zHNfr>=e(@p0l?mcEU5>{Qck_B>x#Tuzyr*nP>KSu4DUijavX{;h2p+_@%>g18;8`0 ztB&2`k`6|_yx1y6Iro@BG3`xII^1l1KcV74ZKhw2(Uwo2n|WmFfRqCnIO}%so5b4> zpHp?-|HDmIkQ#^7NRPVAD3#fz?z#z_;0Qr-)H^YCNKtjpU>bEv;_yo*Gls|r=|j}m zZe9DS4_ESkji zd`OjoSt$;C1Sf)+{S>{mkQs6$tU}&K@_iR?Yh}=J`oHe5)nqjds=*y~DsF8vKautJ zB@bE?31$rt$$@r+NW9$Nkid!CptPgDmZCc}sN$csH> zeBPMl51D7Av?}A&IYlSeQmer@n#NFZ@}=i()R)W{ltf;HoK<3> z3kxw?E-nQfWQWEpE7UE>_hjRL<&{<*KI4!_gEAPniXdQcV>!?$bs5A0*y%T|TG~2v;N?Lzw1Wg)tyeY~P)Sni3oRCeFct8afR{+F5YlCZ z5|xIjaHHsD=P7=m*!bb_Elc)6I~Nh-kV|p3jEA6MkFOhATQP;(0#A?_at6bU0pt^U zLLM@sT%}$0DE`iWT6q+WLuyRDh-BN;0{j-u0XDQ419;1y#qS`~$!vXfpL zht#ktLVCG>$RAr*irE1KDq-Dm>q8Aem_lUq5GlL(wRHM;cjHb&rCA=p&<w}X>bsL;+2yE?CH;)v~<@n4yr-b&Ms&gL(-C# z9X`#t=msd2(;lX{L17L)4YUS&0T6k~yx>ey`cP%%?|-(Hox(U|E2fr}qE{SuU`8UB zle&J|iMvQ}0nmkRh$3wudKVNa)qux8+NmKMht!x3-{DQ>PhlDR%x*(M^BI4p(T~S{ zj23H_Nq-zfP?E*hQr@Vi>`Xxzht!~+yO>-p;eXP1^053v>kxxOCf*QDKq`*aq4bZk ztcwl=O}2m{stq=maWswOpp+qpQ<)DxGbmk$FBJ=!{_<;Xjz>(;4INxYU6D9u+=v1j zBhl2lT~s^pjCE-Ks;%OsZ@$EJ(?TSsA@#adqFd^iT0jTIq-?Pvyd_81ZX*>`p1zpI z45}AAf{;Q{@hiHpLQ%pA@Hp&4VZe1%B`Cjsza5mOAvMYaQpUm4DOjm*n_m%Wk7g)1 
zqqhkql)?>HDfmxl%F=_1k!WP^q~X?8o8)iUnJYF9sc}~N@%7bQTto8zvqw$Xay%Cd z7-5=1S(r;N+Elo76tR-hL2G`j+IN?slX zvCm5@UwucfL*Dc6X zzS=lmWXFonIHU=*mQ;+1E=KY2MV1yHDZdz<(1oRY0#_dAg#`9Lsz>T!eDtK1y~8-9 zhF0I`OV#$aM-kcJ6ncv?7JwCibO&O>ToH>7GW>86`xM@gX;JHS>xvan#vwJRYUEzO zVHNSCw=vrsa#7r((ze@p3C7DL7PfzZC>q@^kFpK=x0um$G2|gd$(-3`NUpMnyx&f; zX&h1`thnSFVIOPOUOvdE)VGv7MDK`sclO4NVLe9baS9}*VTc#zBTO?lVoTv55Mamc zbi#~7E(g}~qM@};|3(-{XkRkffTfu)0#yM?HjFC>SVGPX)H8Xz9m8(pkQ!M1B-l!fte`ynnL_4IAj{!nQeLLMWao4?4yiHKPv-&rsVA14&g|!059#bt;zf%K zJ#J?65OYT100CJ*y-sRht!ZN!L)ry)!e>mfbAM8i(QnbAnoHPBRj{5uiJSXO+#vo^{jbw zpsZBePGyuC(wOcfL}BiSkWUfNVo*6_+$-hw!3B#q4`zU%`l6kO(Kuuis&TgLV$U1I zxy!DO7d(p-;9r2n1mzjfQQx44$wcZX>v5adk@p+L3GZ{-43JXV^ zpcCT^78N-drknz5a$USr6~*Hh$5viH^5(C?MX`c;N!fog_jX zjLtfgZjpCJwG|x%*(x8g)0;33sX^6`wUR*Edd-I>>i3xdg<1p)B10a4T^}z$7A!+| z9GMZa7dlUZ>T7nkNaK)8P_=B0^coPiDVR2s?Y=`>g^LH|6;a!u1I`jc|Aq4*s7|?7 z9zf%e4G*AH1yp^Yu@T{C5#F?Yxi>QRA-7@fim+kD2AvZCJ`R!yN;Jww*VZ_i##4>; z=xwmVW7LxfLc~LmOpM{G#EcElpYR|f;ylk&#vi8})#OAW{LsBt*3dYlMp#cx57tm!x~8qqC^b3?S&X>Qa_8!Z zLm)>2t428-b}eIA9xVVYzvN8Vup{AS98%+}UnS4O*Rp)b&r%cT#m8}nQa^7ZwzV=* zP725Ni6=l8?f*hiCw};PE6bNh!>M=i(+^pBSdByKC9P&!+ra8=rv)k38k&h*Z15$L zq(R0ah1`^R3F8WIB16<3uvnH|$G4+ZZX8l0t!H?XV!DL_Nh?tcx;}zAP#_T?$+1~N zK1ar|X_$J59>MqgV!T|E&{n-;1|wKCgkyw2cn*#!?O zvs|wcB*zPZ*}9zkm9(MLK|xJsoDzlZT048GaYz$%yHB*CbI`V?aibi~U|I-EoVgFQ z^ZV3}5;DJ(sWcu@S-Y>=Sv%v98eP4=$eQ(*4*T+Nd8*5{AgvS`9ks_fvjL{F*UY2-#+=oxE5AtLIuK?s4G-7$Y_)t87bsM zAUS6iFN_>D&hP_vm>9+(m*Q&K`Puei#y}@5s0ihhOc>iGxqCqXB*WwssCJ8{g?h&`NTMkXZVny@$ygbY$}XQGlNyvXV}x19e zL8@veU$WC>G!Ci3)k{M=MaG8Mn-(LskYd={;|ZAYN9Tl=92Z1!LE-8T^$wPrD3T*0 ziDw^UdWw*7o1KT!IHW1OmUqs@U7GPc+`oix$Ce+{6PIaG26!13LTr(Fnn3jq)*iDn zm~0$U!>boAA1?;$8#6BcvWT*FZTT8f2DIi-9}Mu8@-v^6ghUe;al!;2BJ5PA^Vm<+ z)46Hq=`;>$hOa#z&7+Qs?F#J!N(tni=$YZ8i_``hpOg{sdPB61hKZbU-?k(3X&h4H ztL3A~gvkw29+E3_Gk*=g^N`~#$G(kf7I_T0DXkzn2TYaY3J?b%OkK5SzQfL*X&h1m ztE4H)d{imF^vb8R?L^2)vb&5vg?Lg31rNi-2zgc(PqG09IhgSE8Ecj$d+tlETuY2Y 
z9=&7`P9{aRl#r2Qq(Nhq8zVlt%s*nx+6@@V_Rz_aQ5(uzyoI5fl8NYPoMP$AH=OtFw%Q&QlRV&w3 zW#nEa3hb@9sYrLiSL2E$M>R^p0R}cKBCH`41(^qcae}z6)L2jd!ndwGW(A*dP>rv4 z?JAy_A$Hk1OT?|8?34nr7qM<+-YEjlV!zF_O-$Pg7Y<56yu=wb(Ij| zVX6xsacUvtFeu2Ptbr5-!>N$2Q;Km^}s@jHeA(E;h=|yrzEslviQZrZ!q@xZS zhZ91>#;y~RD!es8_c}X+D8?Z*x>}+5uzqaY(F|=61|b9mDHAid0jr$*6{8L=V-ED1 z5c@}I+*O6TzqIo_8i&-_Y83-B{Z!Tjg)-I*-`E098U;E`oHJ}TV!YwwK9o@8q;iGp z^>!#i#vwJX+AVQ&d%}M=k}3N~O9|XXeG$@aaWCVL%Yc#pk6W+6eI|T6C>kIKE95RN_gztq>M+Owd4`S!CVDtDD30=H?A)h} zLuzof-Eq8y4E5m2)_OsLW3kI%C>MCHh8@^3n4!>-fvfDt3~P5pN|jVpmh5`R$}wgf z@@Q1js2&cbPS8Ae7&17IAympVip-gF5Mv@v6PG8E(+mn1eiQIx)Pwl>@3w-D!+&{IkyKay$ysk6`Q2~*fy4Mo=x>rL)f6oj#8o3w^G0}$00L==@iVXB(_W}PpK{bp0!bX?LIGP4hyGV)G>zX>c?&s+0v|98A z8QL1KSv|4k4R9`jS%6kHqpmm*qiI9EqeJSch!Q7Xe2M7Fri*b%jjnc)s-tW6=tYbN z31#YrBwnP+IM*__z#S2Kff!`7q9|5!s>vq*lpSWgaY&7=mVVJLJ2ahl9*IE2=NWh{ z7`~Glw+K`Es0>lmU|t9gB!yvIXG^EePuZD1G!CgD-MlYjR=!ozA}dL`j>s5L<6M-A zW_H}e$`hTCG++QbM-7XMl<+;kmZP!6WSb}i*m)!*04))8un!hY%OpAq6FS$9tBUp9OoRM!;XP)(NAdB z^S_SD)Oz{1l0B$T3)xumMR6BSMpcAqOTj)m`)D zRvuO3kQ(0YM_kOx_whnnH9l0k2$7sHgP`S!t855YJg3bTNT$H?!KsM~r~v)izgPik z98v>aH2`X)<3taUCo)8&75VTKIumO5aMZ*^LLSyGqYix>gB2sKZ>P#^98zPg@e&sH zAA|lB!X*$}QKR*E6ZU-jGz9nm9;busqr;m7p0RF;BUcIz^5RH^Tr91zdBC43Af@0%BX;btGxWO^l0eu0FIa0cScBFuN zft|h6IHU$woB3M=cgXCqt>g|bEk~#YN@xrxgHkjcD1>W6OdcUK#0d3rM@2eLVBTrx zT4Nki!)!hx3`CP)98$myX^*2)%7B=Ij5ntq+=&h)P}Vbot%1~p;QNA|Wi$@C9N%*J zBnVH8pmvdF&k?oavK`Y^W+Q5mj6-UC&6&Pg-_^%79=A2d z!*C8^Y}3d5AcACrJxajgLG6aNIf~^1@jvX0Y8r<$fOyb?Z9e9av6aDo=nfR3;5#_D zWYI$j&>V{UDI#Y4KPQK3eEpT3Y}h!YhS+>mnDi_AOo}n+PD&pMgAFztdv*IVy}pE(K`Px}Oq2^5}3RI4vm1$@ETeHq4Y6|6Ij;uJ-1tN?j^0+%P9n}=G@>HTAsBRW?0`|_BabV0@`ix5}*pg*+*ZT6cW>Qb;0>p zI}ID-kjr05E&$VFY0IrIeotKy*(h+8asJM4LO~(qu6I0Qlo3GW-J?? 
zM~XuH@Gx4#M!rLThJG+AO|;#pm%2DjI-$$Vta=GwW=9|1IHU&GoaW0mvt&vc@)Ely zWAbQQLn)DgLhAUe90c-CKM>1_5P?i}TJ+v$<M9PQ}CvA1M4;9g4IDw*`v595KoWq zv1Ghv98`mBzSh*{Vw+z`Cq*F{@)zPALh+(Z!`&Mek@!gokAX22h#O^Z|7Y#Ikj5c3 z%G#8J;reA{_xN8iAR74&okRHlXe7{KgOefp`FIARhAMC3>0kS#t@Sevx?Rk#n@yN| zO=d;D_w@G4jpYAV9{;Ul*ZeAfxcr~X-@xT>;PN-{|KJ;VYI@~0-S?$Gwt=?LthYXx zZEUB#{RzVN9{ST*}?48ut+AO z!+${p{+<4G72BnJy1JPRMtjqr*jph~c10QM-Fvsquj1d?n?I-iZ_ckBjdR=-^S$|P zYj>{&Z{2)&^TE4+=*hLm-mv@N==AW>(b`UO-@_Y^-oLh!zU|(vbp5Wo_U2b@PlkK* z(;Ktd*7T0mRq>fDfv%v5by2_Z{F-t47?Krx>UW)PB>X;os7ZJ>;>b>hMl+$0N`4Sg zDMGyg4;VK@&Vh_4+M6Gji}v398d2mL%=R{}o}ayh->v+V>8f%3?d_G%+;;`5e#bvW zM=dH#@x}{U^^?oK!~GThv*#bI@Rcm$>A(KF6UP?T^o478-}Cr`!_&j;zWB-HkvH#T zqqF`4cdzZTzO3W%QL@SRc4G1A+M8e3-yRNomJ9%VlBfoFBlRmcQJ@A&d zKe_f=dAzPq_}FC9Q%{*!t)%*!Q~jjL)jS)cz=N}bJapTuk|uSWSMS{A@6B(3!%=>Q zJlV(gpV(JapLO_&l@B}oj`0#IB9{6UUcCQf3lI6?_p&#?u|FOT$B*+VL;sDAtNq7t zD$8V27V1*k|*Td=hTzU2i`Q@PN&&qz(^$Wl;y{U=D*;fpXSrXll5dYcv6XQ z=ucZQ|1^Gj%6Fx<+VWRfHUDAtQ#`cA4$q>FeTVsvmp^uHRlQ8vE?>>fsEHJV9lq?v zkGz1@tNnbnKc0AS;al39KUo^qc+ClOZ`UGN9@hN3p2Fs;?Q%wc9HPaCrwxPU!T$T zW-2@0#>?h+UF0X0AH_DD;Mlf4DhmDe-FK0nSAER6RlOUZNa=ARI-@`M{)_zJ{ZBc! 
zTK*Z!{Vb^yms{7J7x}632`si6g)Bem-i!Psxss}nIk(D9j^FMCf2Viw8{V_*bGg(U zm;C>VYrlPr|6cyj2~thdvIzUtwep9Q3$xG*Axto9#~cqcF!;|w z7xiKXztLFcn~?;I#lPK#=1685O*hAG(m+@SJ`RZ8pqX;p1%)if=~8fQfx}4 zVP*CNrOp!Bd2$>KBBv;d(5mb(1cAQ{B2QVzqtS|&|&bOX(Qw+QO%~V4f_LzwI49>2s1E?;U3#~ZH%L70Jl^C zC?$;ggY}i&&0)*)g&G;k?Xcin7R1344u|hJj}Y4c4bw)L^D>3Yf~aadX~j`rEwy3YtpP#k^37v^giQ|8psU* zL4`M2r{dkXAWW_I9kdltgGSXF8wU(QV|>U1v@;Ny=!%~;gJ6nA^oSkl6XTE?$Md9g be+t=w&EgdGmGPuFH7RzcM?m6?zxw|Gu61Qf literal 0 HcmV?d00001 diff --git a/notebooks/compare_benchmarks.ipynb b/notebooks/compare_benchmarks.ipynb new file mode 100644 index 0000000..beec5c8 --- /dev/null +++ b/notebooks/compare_benchmarks.ipynb @@ -0,0 +1,305 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "7d96c171", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/mnt/sdd1/atharvas/formulacode/datasmith\n" + ] + } + ], + "source": [ + "# replication experiment\n", + "%cd /mnt/sdd1/atharvas/formulacode/datasmith\n", + "from datasmith.benchmark.collection import BenchmarkCollection" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "d23fc794", + "metadata": {}, + "outputs": [], + "source": [ + "all_benchmarks_meas1 = BenchmarkCollection.load(\"artifacts/raw/downloads/sklearn/dashboard.fc.pkl\")\n", + "all_benchmarks_meas2 = BenchmarkCollection.load(\n", + " \"artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "809c8940", + "metadata": {}, + "outputs": [], + "source": [ + "# list(all_benchmarks_meas1.benchmarks.hash.unique())" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "d87d14d4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0 1519e5b265c4ce087656b260eb08663280bef289\n", + "1 5fb9d486e86172a5f2c9453882838047480701c2\n", + "2 NaN\n", + "3 e53407e2db30b59f8cc32269be92f2e08359a189\n", + "4 NaN\n", + 
" ... \n", + "35760 2ca6d4d2fd53a53f92f8b220edee862553b76ffa\n", + "35761 NaN\n", + "35762 ec1be32e34369c3021569da76a6929497d303301\n", + "35763 107e00914692951944e64077398d74b1c5d761c7\n", + "35764 NaN\n", + "Name: hash, Length: 35765, dtype: object" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# all_benchmarks_meas2.benchmarks\n", + "all_benchmarks_meas2.summaries[\"hash\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6f9c88cc", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA90AAAHqCAYAAAAZLi26AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAA3PNJREFUeJzs3Xd8FHX6B/DP7qZseiBAEnoXQugI0iwcShRETqUpAoqc5VARu+cBnp6AFTxQTn5nbwgiomgQUJSOSg2hG4qQQgjpfXd+f+zOZLbP7M5mUz7v1ysvyGZ2dzbZMs88z/d5dIIgCCAiIiIiIiIizekDvQNEREREREREDRWDbiIiIiIiIiI/YdBNRERERERE5CcMuomIiIiIiIj8hEE3ERERERERkZ8w6CYiIiIiIiLyEwbdRERERERERH7CoJuIiIiIiIjITxh0ExEREREREfkJg24iImowtmzZAp1Oh9WrVwd6VxSZP38+dDodcnNz/XYf06dPR/v27f12++R/Op0O8+fPV3Ud/t2JiOoOBt1ERKTI+++/D51OZ/PVokULXHfddfj+++8DvXuNiv3fwdXXli1bAr2rLp09exb3338/2rdvj9DQULRo0QLjxo3D9u3bA71rTtn/bqOjo3HNNddg/fr1gd41IiKq44ICvQNERFS//Otf/0KHDh0gCAKys7Px/vvv46abbsI333yDMWPGBHr3GoWPPvrI5vsPP/wQGzdudLi8e/fuWLFiBcxmc23unkfbt2/HTTfdBAC49957kZSUhKysLLz//vsYPnw4lixZgoceeijAe+no+uuvx9SpUyEIAs6cOYO3334bN998M77//nuMGjXKb/dbVlaGoCB1h2x18e9ORNRYMegmIiJVbrzxRgwYMED6fsaMGYiPj8dnn33GoFuhkpISREREeH39KVOm2Hy/a9cubNy40eHyuujy5cu4/fbbERYWhu3bt6NTp07Sz+bMmYNRo0Zh9uzZ6N+/P4YMGVJr+1VeXo6QkBDo9a6LALt27WrzO77tttuQlJSEJUuW+DXoNhqNqq8THBzshz0hIiJvsLyciIh8Ehsbi7CwMIdMnNlsxuLFi9GjRw8YjUbEx8fjvvvuw+XLl222a9++PcaMGYNt27Zh4MCBMBqN6NixIz788EOH+8rPz8ejjz4qlSS3bt0aU6dOdVgTbTab8e9//xutW7eG0WjEX/7yF5w8edJmm2uvvRbJyck4ePAgrrnmGoSHh6Nz587SevCff/4ZgwYNQlhYGK644gps2rTJ5vpnzpzBgw8+iCuuuAJhYWGIi4vD+PHjcfr0aZvtxLL8n3/+GQ8++CBatGiB1q1bu/x9njlzBp07d0ZycjKys7NdbqeU/dre06dPQ6fT4dVXX8WyZcvQsWNHhIeH4
4YbbsC5c+cgCAJeeOEFtG7dGmFhYbjllluQl5fncLvff/89hg8fjoiICERFRWH06NE4fPiwx/3573//i6ysLLzyyis2ATcAhIWF4YMPPoBOp8O//vUvAMBvv/0GnU6HDz74wOG2NmzYAJ1Oh2+//Va67Pz587jnnnsQHx+P0NBQ9OjRA++++67N9cS1/59//jmee+45tGrVCuHh4SgsLPS4/3Ldu3dHs2bNcOrUKZvLKyoqMG/ePHTu3BmhoaFo06YNnnzySVRUVEjbJCcn47rrrnO4TbPZjFatWuH222+XLrNf011UVITZs2fblOZff/312Lt3r7SNszXdJSUleOyxx9CmTRuEhobiiiuuwKuvvgpBEGy20+l0mDVrFtauXYvk5GTp95iammqznZL9ICIiZrqJiEilgoIC5ObmQhAE5OTk4D//+Q+Ki4sdsqz33Xcf3n//fdx99914+OGHkZGRgaVLl2Lfvn3Yvn27TSbu5MmTuP322zFjxgxMmzYN7777LqZPn47+/fujR48eAIDi4mIMHz4cR44cwT333IN+/fohNzcX69atw59//olmzZpJt7dw4ULo9Xo8/vjjKCgowMsvv4w777wTu3fvttnHy5cvY8yYMZg0aRLGjx+Pt99+G5MmTcInn3yC2bNn4/7778cdd9yBV155BbfffjvOnTuHqKgoAMCvv/6KHTt2YNKkSWjdujVOnz6Nt99+G9deey3S09MRHh5uc18PPvggmjdvjrlz56KkpMTp7/bUqVMYMWIEmjZtio0bN9o8Jq198sknqKysxEMPPYS8vDy8/PLLmDBhAkaMGIEtW7bgqaeewsmTJ/Gf//wHjz/+uE3g+tFHH2HatGkYNWoUFi1ahNLSUrz99tsYNmwY9u3b57aB1zfffAOj0YgJEyY4/XmHDh0wbNgw/PjjjygrK8OAAQPQsWNHfPHFF5g2bZrNtitXrkSTJk2kLHN2djauuuoqKWhs3rw5vv/+e8yYMQOFhYWYPXu2zfVfeOEFhISE4PHHH0dFRQVCQkJU/Q4LCgpw+fJlm5MHZrMZY8eOxbZt2/C3v/0N3bt3x6FDh/DGG2/g+PHjWLt2LQBg4sSJmD9/PrKyspCQkCBdf9u2bbhw4QImTZrk8n7vv/9+rF69GrNmzUJSUhIuXbqEbdu24ciRI+jXr5/T6wiCgLFjx+Knn37CjBkz0KdPH2zYsAFPPPEEzp8/jzfeeMNm+23btmHNmjV48MEHERUVhTfffBO33XYbzp49i7i4OK/3g4ioURKIiIgUeO+99wQADl+hoaHC+++/b7Pt1q1bBQDCJ598YnN5amqqw+Xt2rUTAAi//PKLdFlOTo4QGhoqPPbYY9Jlc+fOFQAIa9ascdg3s9ksCIIg/PTTTwIAoXv37kJFRYX08yVLlggAhEOHDkmXXXPNNQIA4dNPP5UuO3r0qABA0Ov1wq5du6TLN2zYIAAQ3nvvPemy0tJSh/3YuXOnAED48MMPHX5vw4YNE6qrq222nzdvngBAuHjxonDkyBGhZcuWwpVXXink5eU53LY7f//73wVXH+nTpk0T2rVrJ32fkZEhABCaN28u5OfnS5c/88wzAgChd+/eQlVVlXT55MmThZCQEKG8vFwQBEEoKioSYmNjhZkzZ9rcT1ZWlhATE+Nwub3Y2Fihd+/ebrd5+OGHBQDCwYMHpX0LDg62+b1UVFQIsbGxwj333CNdNmPGDCExMVHIzc21ub1JkyYJMTEx0t9MfJ507NjR6d/RGQDCjBkzhIsXLwo5OTnCb7/9JqSkpAgAhFdeeUXa7qOPPhL0er2wdetWm+svX75cACBs375dEARBOHbsmABA+M9//mOz3YMPPihERkba7BcAYd68edL3MTExwt///ne3+2v/d1+7dq0AQHjxxRdttrv99tsFnU4nnDx50ub+QkJCbC47cOCAw/4q2Q8iIhIElpcTEZEqy5Ytw8aNG7Fx40Z8/PHHuO6663Dvvfdiz
Zo10jarVq1CTEwMrr/+euTm5kpf/fv3R2RkJH766Seb20xKSsLw4cOl75s3b44rrrgCf/zxh3TZl19+id69e+Ovf/2rwz7pdDqb7++++26brKV42/LbA4DIyEibjOIVV1yB2NhYdO/eHYMGDZIuF/8vv35YWJj0/6qqKly6dAmdO3dGbGys0/LamTNnwmAwOFwOAGlpabjmmmvQvn17bNq0CU2aNHG6nZbGjx+PmJgY6XvxMU6ZMsVmqcCgQYNQWVmJ8+fPAwA2btyI/Px8TJ482eZvazAYMGjQIIe/rb2ioiKpWsAV8ediuffEiRNRVVVl8xz74YcfkJ+fj4kTJwKwZHK//PJL3HzzzRAEwWbfRo0ahYKCAoe/y7Rp02z+jp7873//Q/PmzdGiRQsMGDAAmzdvxpNPPok5c+ZI26xatQrdu3dHt27dbPZhxIgRACD9frp27Yo+ffpg5cqV0nVNJhNWr16Nm2++2e1+xcbGYvfu3bhw4YLiff/uu+9gMBjw8MMP21z+2GOPQRAEhwkEI0eOtMng9+rVC9HR0TavAW/2g4ioMWJ5ORERqTJw4ECbRmqTJ09G3759MWvWLIwZMwYhISE4ceIECgoK0KJFC6e3kZOTY/N927ZtHbZp0qSJzfrvU6dO4bbbblO0j/a3Jwax9uvJW7du7RCwx8TEoE2bNg6X2V+/rKwMCxYswHvvvYfz58/brIstKChw2KcOHTq43N+bb74Z8fHx2LBhAyIjI909NM3Y/47Ex+jpsZ84cQIApCDSXnR0tNv7jYqKQlFRkdttxJ+LwXfv3r3RrVs3rFy5EjNmzABgKS1v1qyZtB8XL15Efn4+3nnnHbzzzjtOb9f+eefub+LMLbfcglmzZqGyshK//vorXnrpJZSWlto0Xztx4gSOHDmC5s2be9yHiRMn4tlnn8X58+fRqlUrbNmyBTk5OdKJBFdefvllTJs2DW3atEH//v1x0003YerUqejYsaPL65w5cwYtW7Z0OOHRvXt36edySl6T3uwHEVFjxKCbiIh8otfrcd1112HJkiU4ceIEevToAbPZjBYtWuCTTz5xeh37gMRVBliwa/CklNLbc7Wdkus/9NBDeO+99zB79mwMHjwYMTEx0Ol0mDRpktNRTe4yl7fddhs++OADfPLJJ7jvvvtcbqclbx+7+Ng++ugjm7XIIk+jrbp37459+/ahoqICoaGhTrc5ePAggoOD0aVLF+myiRMn4t///jdyc3MRFRWFdevWYfLkydL9ifs1ZcoUh7Xfol69etl8rybLDVhO0owcORIAcNNNN6FZs2aYNWsWrrvuOtx6663SfvTs2ROvv/6609uQn9SYOHEinnnmGaxatQqzZ8/GF198gZiYGKSkpLjdjwkTJmD48OH46quv8MMPP+CVV17BokWLsGbNGtx4442qHpMrSl4DtbEfREQNAYNuIiLyWXV1NQBLszMA6NSpEzZt2oShQ4eqDmxc6dSpE9LS0jS5LS2sXr0a06ZNw2uvvSZdVl5ejvz8fNW39corryAoKEhqWnXHHXdouKfaEkuOW7RoIQWgaowZMwY7d+7EqlWrnI44O336NLZu3YqRI0faPHcmTpyI559/Hl9++SXi4+NRWFhoszSgefPmiIqKgslk8mq/vHHffffhjTfewHPPPYe//vWv0Ol06NSpEw4cOIC//OUvDlUU9jp06ICBAwdi5cqVmDVrFtasWYNx48a5PBkhl5iYiAcffBAPPvggcnJy0K9fP/z73/92Gey2a9cOmzZtcijvP3r0qPRzb6jdDyKixohruomIyCdVVVX44YcfEBISIpWqTpgwASaTCS+88ILD9tXV1V4FprfddhsOHDiAr776yuFn3mbEfWEwGBzu9z//+Q9MJpPq29LpdHjnnXdw++23Y9q0aVi3bp1Wu6m5UaNGITo6Gi+99BKqqqocfn7x4kW317/vvvvQokULPPHEEw5r7MvLy3H33XdDEATMnTvX5mfdu
3dHz549sXLlSqxcuRKJiYm4+uqrpZ8bDAbcdttt+PLLL52enPG0X94ICgrCY489hiNHjuDrr78GYHnunz9/HitWrHDYvqyszKFz/cSJE7Fr1y68++67yM3N9VhabjKZHJYvtGjRAi1btrQZSWbvpptugslkwtKlS20uf+ONN6DT6VQHyd7uBxFRY8RMNxERqfL9999L2bGcnBx8+umnOHHiBJ5++mlpPe8111yD++67DwsWLMD+/ftxww03IDg4GCdOnMCqVauwZMkSmznESjzxxBNYvXo1xo8fj3vuuQf9+/dHXl4e1q1bh+XLl6N3796aP1Z3xowZg48++ggxMTFISkrCzp07sWnTJmmcklp6vR4ff/wxxo0bhwkTJuC7775zuW46kKKjo/H222/jrrvuQr9+/TBp0iQ0b94cZ8+exfr16zF06FCHwE4uLi4Oq1evxujRo9GvXz/ce++9SEpKQlZWFt5//32cPHkSS5YswZAhQxyuO3HiRMydOxdGoxEzZsywWUsNWEbF/fTTTxg0aBBmzpyJpKQk5OXlYe/evdi0aZPTeeO+mj59OubOnYtFixZh3LhxuOuuu/DFF1/g/vvvx08//YShQ4fCZDLh6NGj+OKLL7BhwwabnggTJkzA448/jscffxxNmzb1mKUvKipC69atcfvtt6N3796IjIzEpk2b8Ouvv9pUXdi7+eabcd111+Ef//gHTp8+jd69e+OHH37A119/jdmzZzvMTPfE2/0gImqMGHQTEZEq8gyk0WhEt27d8PbbbzusRV6+fDn69++P//73v3j22WcRFBSE9u3bY8qUKRg6dKjq+42MjMTWrVsxb948fPXVV/jggw/QokUL/OUvf0Hr1q19flxqLVmyBAaDAZ988gnKy8sxdOhQbNq0SZoZ7Y3g4GCsXr0aN954I2655RZs2rTJpot6XXHHHXegZcuWWLhwIV555RVUVFSgVatWGD58OO6++26P1x8+fDgOHjyIl156CatWrUJmZiZiYmIwZMgQvPvuuxg2bJjT602cOBHPPfccSktLnWaE4+PjsWfPHvzrX//CmjVr8NZbbyEuLg49evTAokWLfH7czoSFhWHWrFmYP38+tmzZgmuvvRZr167FG2+8gQ8//BBfffUVwsPD0bFjRzzyyCPo2rWrzfVbt26NIUOGYPv27bj33ntt5tc7Ex4ejgcffBA//PAD1qxZA7PZjM6dO+Ott97CAw884PJ6er0e69atw9y5c7Fy5Uq89957aN++PV555RU89thjqh+3t/tBRNQY6YRA1OQRERERERERNQJc001ERERERETkJwy6iYiIiIiIiPyEQTcRERERERGRnzDoJiIiIiIiIvITBt1EREREREREflIngu5ly5ahffv2MBqNGDRoEPbs2eN2+1WrVqFbt24wGo3o2bMnvvvuO5ufr1mzBjfccAPi4uKg0+mwf/9+h9u477770KlTJ4SFhaF58+a45ZZbpLmzRERERERERFoI+JzulStXYs6cOVi+fDkGDRqExYsXY9SoUTh27BhatGjhsP2OHTswefJkLFiwAGPGjMGnn36KcePGYe/evUhOTgYAlJSUYNiwYZgwYQJmzpzp9H779++PO++8E23btkVeXh7mz5+PG264ARkZGTAYDB7322w248KFC4iKioJOp/Ptl0BERERERET1iiAIKCoqQsuWLaHXu8lnCwE2cOBA4e9//7v0vclkElq2bCksWLDA6fYTJkwQRo8ebXPZoEGDhPvuu89h24yMDAGAsG/fPo/7ceDAAQGAcPLkSUX7fe7cOQEAv/jFL37xi1/84he/+MUvfvGrEX+dO3fObewY0Ex3ZWUlfv/9dzzzzDPSZXq9HiNHjsTOnTudXmfnzp2YM2eOzWWjRo3C2rVrvd6PkpISvPfee+jQoQPatGmj6DpRUVEAgHPnziE6Otrr+yYiIiIiIqL6p7CwEG3atJFiQ1cCGnTn5ubCZDIhPj7e5vL4+
HiX66uzsrKcbp+VlaX6/t966y08+eSTKCkpwRVXXIGNGzciJCTE6bYVFRWoqKiQvi8qKgIAREdHM+gmIiIiIiJqpDwtN64TjdQC5c4778S+ffvw888/o2vXrpgwYQLKy8udbrtgwQLExMRIX0oz4kRERERERNR4BTTobtasGQwGA7Kzs20uz87ORkJCgtPrJCQkqNrenZiYGHTp0gVXX301Vq9ejaNHj+Krr75yuu0zzzyDgoIC6evcuXOq74+IiIiIiIgal4AG3SEhIejfvz82b94sXWY2m7F582YMHjzY6XUGDx5ssz0AbNy40eX2SgmCAEEQbErI5UJDQ6VScpaUExERERERkRIBHxk2Z84cTJs2DQMGDMDAgQOxePFilJSU4O677wYATJ06Fa1atcKCBQsAAI888giuueYavPbaaxg9ejQ+//xz/Pbbb3jnnXek28zLy8PZs2dx4cIFAMCxY8cAWLLkCQkJ+OOPP7By5UrccMMNaN68Of78808sXLgQYWFhuOmmm2r5N0BEREREREQNVcCD7okTJ+LixYuYO3cusrKy0KdPH6SmpkrN0s6ePWsz82zIkCH49NNP8dxzz+HZZ59Fly5dsHbtWmlGNwCsW7dOCtoBYNKkSQCAefPmYf78+TAajdi6dSsWL16My5cvIz4+HldffTV27NjhdDY4ERERERERkTd0giAIgd6J+qiwsBAxMTEoKChgqTkREREREVEjozQmbNTdy4mIiIiIiIj8iUE3ERERERERkZ8w6CYiIiIiIiLyk4A3UiMiIiIiIqJ6ymwCzuwAirOByHig3RBAbwj0XtUpDLqJiIiIiIhIvfR1QOpTQOGFmsuiWwIpi4CksYHbrzqG5eVERERERESkTvo64IuptgE3ABRmWi5PXxeY/aqDGHQTERERERGRcmaTJcMNZ9OnrZelPm3Zjhh0ExERERERkQpndjhmuG0IQOF5y3bEoJuIiIiIiIhUKM7WdrsGjkE3ERERERERKRcZr+12DRyDbiIiIiIiIlKu3RBLl3LoXGygA6JbWbYjBt1ERERERESkgt5gGQsGwDHwtn6fspDzuq0YdBMREREREZE6SWOBCR8C0Ym2l0e3tFzOOd0SBt1ERERERESkXtJYYHZazfddbwJmH2LAbYdBNxEREREREXlHXkIe05Il5U4w6CYiIiIiIiLyEwbdRERERERERH7CoJuIiIiIiIjITxh0ExEREREREfkJg24iIiIiIiIiP2HQTUREREREROQnDLqJiIiIiIjId4IQ6D2okxh0ExEREREREfkJg24iIiIiIiLynU4X6D2okxh0ExEREREREfkJg24iIiIiIiLyHdd0O8Wgm4iIiIiIiMhPGHQTERERERGR77im2ykG3URERERERER+wqCbiIiIiIiIyE8YdBMREREREZHv2EjNKQbdRERERERERH7CoJuIiIiIiIh8x0ZqTjHoJiIiIiIiIvITBt1ERERERETkO67pdopBNxEREREREZGfMOgmIiIiIiIi33FNt1MMuomIiIiIiIj8hEE3ERERERER+Y5rup1i0E1ERERERETkJwy6iYiIiIiIiPyEQTcRERERERH5jo3UnGLQTUREREREROQnDLqJiIiIiIjId2yk5hSDbiIiIiIiIiI/YdBNREREREREvuOabqfqRNC9bNkytG/fHkajEYMGDcKePXvcbr9q1Sp069YNRqMRPXv2xHfffWfz8zVr1uCGG25AXFwcdDod9u/fb/PzvLw8PPTQQ7jiiisQFhaGtm3b4uGHH0ZBQYHWD42IiIiIiIgasYAH3StXrsScOXMwb9487N27F71798aoUaOQk5PjdPsdO3Zg8uTJmDFjBvbt24dx48Zh3LhxSEtLk7YpKSnBsGHDsGjRIqe3ceHCBVy4cAGvvvoq0tLS8P777yM1NRUzZszwy2MkIiIiIiJq8Lim2ymdIAT2NzNo0CBceeWVWLp0KQDAbDajTZs2eOihh
/D00087bD9x4kSUlJTg22+/lS676qqr0KdPHyxfvtxm29OnT6NDhw7Yt28f+vTp43Y/Vq1ahSlTpqCkpARBQUEe97uwsBAxMTEoKChAdHS0gkdKRERERETUAM2Psfw7YAYw5vXA7kstUhoTBjTTXVlZid9//x0jR46ULtPr9Rg5ciR27tzp9Do7d+602R4ARo0a5XJ7pcRflKuAu6KiAoWFhTZfRERERERERO4ENOjOzc2FyWRCfHy8zeXx8fHIyspyep2srCxV2yvdjxdeeAF/+9vfXG6zYMECxMTESF9t2rTx+v6IiIiIiIgaHDZScyrga7oDrbCwEKNHj0ZSUhLmz5/vcrtnnnkGBQUF0te5c+dqbyeJiIiIiIioXvK8eNmPmjVrBoPBgOzsbJvLs7OzkZCQ4PQ6CQkJqrZ3p6ioCCkpKYiKisJXX32F4OBgl9uGhoYiNDRU9X0QERERERE1Cmyk5lRAM90hISHo378/Nm/eLF1mNpuxefNmDB482Ol1Bg8ebLM9AGzcuNHl9q4UFhbihhtuQEhICNatWwej0aj+ARARERERETU0ZhOQsRU4tNryr9kU6D2q1wKa6QaAOXPmYNq0aRgwYAAGDhyIxYsXo6SkBHfffTcAYOrUqWjVqhUWLFgAAHjkkUdwzTXX4LXXXsPo0aPx+eef47fffsM777wj3WZeXh7Onj2LCxcuAACOHTsGwJIlT0hIkALu0tJSfPzxxzaN0Zo3bw6DwVCbvwIiIiIiIqK6IX0dkPoUUHih5rLolkDKIiBprPvrck23UwEPuidOnIiLFy9i7ty5yMrKQp8+fZCamio1Szt79iz0+pqE/JAhQ/Dpp5/iueeew7PPPosuXbpg7dq1SE5OlrZZt26dFLQDwKRJkwAA8+bNw/z587F3717s3r0bANC5c2eb/cnIyED79u399XCJiIiIiIjqpvR1wBdTAdiViRdmWi6f8KHnwJscBHxOd33FOd1ERERERNRgmE3A4mTbDLcNnSXjPfsQoLerDJbP6b7pFeDMDqA4G4iMB9oNcdy+gVAaEwY8001EREREREQBdmaHm4AbAASg8Lxluw7DnW+S9wfwalegNLfmMqWl6Q1Yox8ZRkRERERE1OgVZ3vextN2f/xkG3ADlkD+i6mW0vVGikE3ERERERFRYxcZr+12NgQg9elG2wWdQTcREREREVFj126IpRQcrjqQ64DoVpbt5JQG0mJpeiPEoJuIiIiIiKix0xssa68BOAbe1u9TFjo2RVMTSCstYW9gGHQTERERERGRpdnZhA+B6ETby6Nbuh4XpiaQ9qo0vf5j0E1EREREREQWSWOB2Wk133caaRkT5qr7uNJAOryZY2l6I8Ggm4iIiIiIiGrIS8ij4t3P2VYaSI9+rcHO6/aEQTcRERERERF5R0kgPeRhoMc4v+9KXRUU6B0gIiIiIiKiBiDICFSX13wfHgfc9DqQPC5gu1QXMNNNREREREREzgmC8m07j7T9/uEDjT7gBhh0ExERERERkRZ0dqPGDCysBlheTkRERERE5B2zyTKnujjb0sW73ZCG1yzMPpAm1Rh0ExERERERqZW+Dkh9Cii8UHNZdEsgZZHr8Vr1kZrycvtt1Vy3AWN5ORERERERkRrp64AvptoG3ABQmGm5PH1dYPaL6iQG3UREREREREqZTZYMN5xlca2XpT5t2a6xsS9FZ2k6AAbdREREREREyp3Z4ZjhtiEAhect2zUEagJnlpc7xTXdRBowmQXsychDTlE5WkQZMbBDUxj0PLNHRERE1OAUZ2u7XV3HwNlnDLqJfJSalonnv0lHZkG5dFlijBHzbk5CSnJiAPeMiIiIiDQXGa/tdtTgsbycyAepaZl44OO9NgE3AGQVlOOBj/ciNS0zQHtGRERERH7RboilSzlcVTXqgOhWlu0aAq7L9hmDbiIvmcwCnv8m3V0LDTz/TTpMZpbkEBERETUYeoNlLJhT1gA1ZWHDmdetprycA
bpTDLqJvLQnI88hwy0nAMgsKMeejLza2ykiIiIi8r+kscCED4HgcNvLo1taLm9Ic7rV4Ppvp7imm8hLOUWuA25vtiMiIiKieiRpLHBqM/D7+5bvp31rKSlvKBluTTAIBxh0E3mtRZRR0+2IiIiIqJ7RyQqHOwwP3H5QncbyciIvDezQFIkxRnctNJAYYxkfRkREREREjRODbiIvGfQ6zLs5CYBj70rx+3k3J3FeNxERERE1TlzjDYBBN5FPUpIT8faUfkiIsS0hT4gx4u0p/Tinm4iIiIiokeOabiIfpSQn4vqkBHR69jvpsm1PjWCGm4iIiIiImOkm0oJBr0OwQWfzPRERERFR48bycoBBN5FmjMEcD0FERERERLYYdBNpJDyEQTcREREREdli0E2kEWa6iYiIiIhk2L0cAINuIs2EyYJugW8wREREREQEBt1EmpFnuitN5gDuCRERERER1RUMuok0Is90l1aYArgnRERERER1Aas/AQbdRJrRyaaEFVdUB25HiIiIiIiozmDQTaQRs2wdd2klM91ERERE1Miwr5FTDLqJNGKWvceUVDLTTURERESNjLz0E2AQbsWgm0gjZlnUXcLyciIiIiIiAoNuIs3Iy8tL2EiNiIiIiBobZradYtBNpBF5eXkpy8uJiIiIqNFjEA4w6CbSjCCwvJyIiIiIiGwFBXoHiBoKkzzodtG93GQWsCcjDzlF5WgRZcTADk1h0OucbktERERERPUfg24ijZjNNf8vdZLpTk3LxPPfpCOzoFy6LDHGiHk3JyElObE2dpGIiIiISCUfSsS5xhsAy8uJNCNvpFZs10gtNS0TD3y81ybgBoCsgnI88PFepKZl1so+EhERERFR7Qp40L1s2TK0b98eRqMRgwYNwp49e9xuv2rVKnTr1g1GoxE9e/bEd999Z/PzNWvW4IYbbkBcXBx0Oh3279/vcBvvvPMOrr32WkRHR0On0yE/P1/DR0SNlTzoljdSM5kFPP9NutNzhOJlz3+TDpOZZwKJiIiIqK7hUkhfBTToXrlyJebMmYN58+Zh79696N27N0aNGoWcnByn2+/YsQOTJ0/GjBkzsG/fPowbNw7jxo1DWlqatE1JSQmGDRuGRYsWubzf0tJSpKSk4Nlnn9X8MVHjJY+Z5Wu692TkOWS45QQAmQXl2JOR58e9IyIiIiKiQAjomu7XX38dM2fOxN133w0AWL58OdavX493330XTz/9tMP2S5YsQUpKCp544gkAwAsvvICNGzdi6dKlWL58OQDgrrvuAgCcPn3a5f3Onj0bALBlyxbtHgw1ejaZbtma7pwi1wG3nNLtiIiIiIhqD6sxfRWwTHdlZSV+//13jBw5smZn9HqMHDkSO3fudHqdnTt32mwPAKNGjXK5PVFtkveJKJYF3S2ijIqur3Q7IiIiIqK6iQG6MwHLdOfm5sJkMiE+Pt7m8vj4eBw9etTpdbKyspxun5WV5bf9FFVUVKCiokL6vrCw0O/3SfWLfE12qay8/Lcz7svGdQASYizjw4iIiIiI6hYVa7rtu5WzezmAOtBIrb5YsGABYmJipK82bdoEepeojpGXl2cXlmPnqUt4e8tJvPbDcely+7cs8ft5NydxXjcRERER1UEMnH0VsKC7WbNmMBgMyM7Otrk8OzsbCQkJTq+TkJCganstPfPMMygoKJC+zp075/f7pPqlVDYmLKeoApNX7MKi1GMAgMdv6IrlU/qhRXSozXUSYox4e0o/zukmIiIiImqgAhZ0h4SEoH///ti8ebN0mdlsxubNmzF48GCn1xk8eLDN9gCwceNGl9trKTQ0FNHR0TZfRKLUtEzklVa6/HnnFpFISU7EL09cJ122Ymp/bHtqBANuIiIiIqrD1FRj2mfFmSUHAty9fM6cOZg2bRoGDBiAgQMHYvHixSgpKZG6mU+dOhWtWrXCggULAACPPPIIrrnmGrz22msYPXo0Pv/8c/z222945513pNvMy8vD2bNnc
eHCBQDAsWOWTGNCQoKUEc/KykJWVhZOnjwJADh06BCioqLQtm1bNG3KdbWkjjiH2xUdLHO4r09KQGiwAUF6HarNAnq1jmVJORERERHVcQycfRXQNd0TJ07Eq6++irlz56JPnz7Yv38/UlNTpWZpZ8+eRWZmprT9kCFD8Omnn+Kdd95B7969sXr1aqxduxbJycnSNuvWrUPfvn0xevRoAMCkSZPQt29faaQYYBlN1rdvX8ycORMAcPXVV6Nv375Yt25dbTxsamDUzuEODbK87CqqzLWxe0REREREFEABzXQDwKxZszBr1iynP3M2R3v8+PEYP368y9ubPn06pk+f7vY+58+fj/nz56vYSyLX1M7hDg02oKTShPJqk4drEBERERHVI+xW7hS7lxP5SO0cbma6iYiIiKj+8GE5JINwAAy6iXw2sENTJMa4Drx1ABJlc7iloJuZbiIiIiKq8xg4+4pBN5GPDHod5t2c5PRnzuZwhwYZAAAV1cx0ExEREVEdYjYBGVuBQ6u9vAEG6M4EfE03UUOQkpyIkCA9Ku0C6YQYI+bdnGQzFswYzEw3EREREdUx6euA1KeAwgu2l18+awnEi7OByHig3RBAb1B4owzCAQbdRJoRp3+9OakPBFjWcA/s0NRhLJiU6eaabiIiIiKqC9LXAV9MhdMg+ex24IMxNd9HtwRSFgFJY2tt9+o7Bt1EGjFb36MGtG+KlrFhLrcLtWa62b2ciIiIiALObLJkuJVmpQszLQH6hA8dA282TnOKa7qJNCJY32T0OvcdHtm9nIiIiIjqjDM7HEvK3bIG1qlPWwJ2t5syCAcYdBNpxmQWg27327GRGhEREVFD4cM4rbqiONuLKwlA4XlLwG5/OTlg0E2kEbG8XKc0083yciIiIqJ6rgEEmZHx3l/Xq4C98WHQTaQBQVY6Y984zV5oMBupEREREVEd0W4IEB7n3XU9BuwN4KSEBhh0E2lALC0HlJSXi5luBt1EREREFGB6A9Brosor6YDoVpaAXY5ruJ1i0E2kAVnM7bm8XOxeXsXyciIiIiKqA4yxKja2HuumLFQxr7txY9BNpAGzmvJyNlIjIiIiorrCbAJ+f0/59tEtnY8LA+BQTs7MNwDO6SbShPz9RHl5OTPdRERERBRgZ3YARZnKtg2OAGYfYoZbJQbdRBowCfI13Uq7lzPTTURERES1zGyyBNrF2ZZGaEoDbgCoKgGOrneR5SZXGHQTaUBeXu4h5oaR3cuJiIiIKBDS1wGpTwGFF2ouU9u5PPVpoNtohdlulpcDXgTdGRkZ2Lp1K86cOYPS0lI0b94cffv2xeDBg2E0Gv2xj0R1niCLnw2c001EREREdU36OuCLqXAIhEsvqbudwvOWTHmH4Y4/4xpupxQH3Z988gmWLFmC3377DfHx8WjZsiXCwsKQl5eHU6dOwWg04s4778RTTz2Fdu3a+XOfieocVeXl1kx3OTPdRERERPWchxLHusJssmS4tco8F2drczuNhKKgu2/fvggJCcH06dPx5Zdfok2bNjY/r6iowM6dO/H5559jwIABeOuttzB+/Hi/7DCRt0xmAXsy8pBTVI4WUUYM7NDUY6dxpdSUlzPTTURERNRQ1JPM7pkdtiXlLukBKEgMRcYru19mvgEoDLoXLlyIUaNGufx5aGgorr32Wlx77bX497//jdOnT2u1f0SaSE3LxPPfpCOzoFy6LDHGiHk3JyElOdHn2xeDbr1OwZxuNlIjIiIiotqkNDMd0QIoyXK/TXQroN0QFz9kkO2Mojnd7gJue3Fxcejfv7/XO0SktdS0TDzw8V6bgBsAsgrK8cDHe5GapqJjowviSTxPpeUA53QTERERUS1TmpkOCva8TcpCjgxTSVHQLbd3714cOnRI+v7rr7/GuHHj8Oyzz6KyslLTnSPylcks4Plv0p2ecxMve/6bdJjMvp2VE6+vJOg2BrO8nIiIiIhqUbshQHRLuF6DrrNksIPD3d+OMdb9u
DCHcnJmvgEvgu777rsPx48fBwD88ccfmDRpEsLDw7Fq1So8+eSTmu8gkS/2ZOQ5ZLjlBACZBeXYk5Hn0/2I5eUKYu6aTDcbqRERERFRbdAbgJRFLn5oPYBNWej5YDbIw7Qqwe741swkE+BF0H38+HH06dMHALBq1SpcffXV+PTTT/H+++/jyy+/1Hr/iHySU+Q64PZmO1fEk3pKGrOFWjPd5VV8EyIiIiKiWpI0FpjwoSVbLRfd0nJ50lgFjc88/Pzcbtvvlw8Dtixq9MG36qBbEASYzZYzGJs2bcJNN90EAGjTpg1yc3O13TsiH7WIUjY7Xul2rqgpL2cjNSIiIqKGop6MDBMljQVG/bvm+2nfArMPuS8ZV8Nkt9y4ohDY8hLwSmfLnPBGSnXQPWDAALz44ov46KOP8PPPP2P06NEAgIyMDMTHK1ygT1RLBnZoisQYo7vVK0iMsYwP84VX5eXVZggco0BERERUj9XDYzmdrAlah+G+N0VTksUuywO+mNpoA2/VQffixYuxd+9ezJo1C//4xz/QuXNnAMDq1asxZIir1vFEgWHQ6zDv5iSnb4difDzv5iSf53Wb1XQvD6552VWamO0mIiIionrszA6FGwpA6tONstRc0ZxuuV69etl0Lxe98sorMBjYOp7qnpTkRPRuHYMDfxbYXJ6g4ZxuMWOtJHg3BtW8TiqqzVLmm4iIiIjqm3pWXu4PSmeAA0DheUuQ3mG4//anDlIddLtiNPq2JpbIX0oqqnE0q8jmsvfvvhLDuzT3OcMtMgnimm7P2wYbdNDpLH0qyqtMiDYqmIdIRERERHVQPSwv15rSGeAiNUF6A6Eo6G7SpAl0SharAsjL8230EpHWthy7iIpqM1rGGHHBOj6sb9smmgXcAGDtLajodaLT6RAapEd5lZljw4iIiIiofmuncomx2iC9AVAUdC9evFj6/6VLl/Diiy9i1KhRGDx4MABg586d2LBhA/75z3/6ZSeJfPF9WiYAYEzvlnjnlz8AANUar6UWG6kZFJ6cCg0yWIJudjAnIiIiorrAbAIqS91v46wJsOJGbDrLeDK1QXoDoCjonjZtmvT/2267Df/6178wa9Ys6bKHH34YS5cuxaZNm/Doo49qv5dEXiqvMuGnozkAgBuTE/B/W/+AWQCqzdqWAplVlJcD8rFhja+RBBERERHVMenrgNSngMILfroD60FyykLfu6XXQ6q7l2/YsAEpKSkOl6ekpGDTpk2a7BSRVraeyEVJpQkJ0Ub0bh2LIIPlKV+leabb8q/SZRhiB3NmuomIiIgooNLXWcZ5eRtwKxkDFt0SmPChdvPA6xnVQXdcXBy+/vprh8u//vprxMXFabJTRFpJTcsCAKQkJ0Cv1yHYmoquNvkp063wFSV2MOeabiIiIiIKGLPJkuH2tiGcdH03wuKAh/c32oAb8KJ7+fPPP497770XW7ZswaBBgwAAu3fvRmpqKlasWKH5DhJ5q8pkxqYjlu6IKckJAGDNdJs0Ly8X1K7ptma6y1leTkRERFSP1fORYWd2+FZSruT6ZZeAbW8A13oIzhsw1UH39OnT0b17d7z55ptYs2YNAKB79+7Ytm2bFIQT1QU7T11CQVkVmkWG4Mr2TQFYxnUBQLVZ2wyzWK2uV9FIDWCmm4hIZDIL2JORh5yicrSIMmJgh6aaTpkgIvKPejgyTH686uv4LqXX3/IS0KJ7o812ezWne9CgQfjkk0+03hciTX1vLS2/PilBOnALstZ/+6u8XGHMzUZqREQyqWmZeP6bdGRaxzoCQGKMEfNuTkJKcmIA94yIqIHzdXyXmuunPg10G90oG6l5FXSbzWacPHkSOTk5MNtlDK+++mpNdozIFyazgI3plqD7RmtpOQAEWTPd2jdSs5aXK8zK1ATdzHQTUeOWmpaJBz7e65AryiooxwMf78XbU/ox8CaiOqyeV+S0G2JpclaYCWVZe7ttpOsrKFEvPG8pR+8w3Js9rddUB927d
u3CHXfcgTNnzkjrWEU6nQ4mEzN3FHi/ns5DbnEloo1BGNyppsFfsLV7ueYjw7wtL2fQTUSNmMks4Plv0p0e5gmwHMo+/026TcUSEVHdUg/Ly+UOfwX0nQr8vNC76+sNQMoi4Iu7lG3vazl7PaU66L7//vsxYMAArF+/HomJiYpHJBHVJrFr+cikeCnQBmoy0f7KdCt9PRjFkWFVPElFRI3Xnow8m5JyewKAzIJy7MnIszmBSkREPji/t+b/X87w/fbUrNP2tZy9nlIddJ84cQKrV69G586d/bE/RD4zmwVsOCyWltuWJAb5e2SY4jXdzHQTEeUUuQ64vdmOiKj21bMEZPo6YM9/A3DHOksZershAbjvwFM9p3vQoEE4efKkP/aFSBMH/sxHZkE5IkIMGN6lmc3PasrLtQ12xZUWitd0M9NNRIQWUUZNtyMiqn31qLxcyUxtv7AeH6cs9NxEzWwCMrYCh1Zb/jU3jGNl1Znuhx56CI899hiysrLQs2dPBAcH2/y8V69emu0ckTfE0vLrurWAMdj2hS02UtM6020yqysvZyM1IiJgYIemSIwxIqug3Olhqw5AQoxlfBgREfnI15ncrqSvc//z6JaWgNtTGXr6OstJAfk+Rre0rBmv56PGVAfdt912GwDgnnvukS7T6XQQBIGN1CjgBEGQRoXZl5YDQLDeT43UWF5ORA2Yv2ZoG/Q6zLs5CQ98vNfhZ+Ktz7s5iU3UiIi04I8mZunrgC+mut9m9iHPGW7pduyO0QszLZdP+LBeB96qg+6MjAx/7AeRJtIzC3E2rxShQXpce0Vzh5/7b2SY5V+D6kw3T1IRUd3m7xnaKcmJeHtKP8z54gBKK2veExM4p5uI6oV6dFJQ6yZmUrm6j8kst7djnWVRz2d8qw6627Vr54/9INLEBmuW++quzRER6vj0DhLXdPutkZrS7uXWTHcVM91EVHfV1gztlOREbD6Sg1W//wmDXoePZwzSLJtORORf9WhNt5qZ2kooLVffvdwS8EfGW/bBPnD2eDtCvZ/xrbqRGgCcOnUKDz30EEaOHImRI0fi4YcfxqlTp7zeiWXLlqF9+/YwGo0YNGgQ9uzZ43b7VatWoVu3bjAajejZsye+++47m5+vWbMGN9xwA+Li4qDT6bB//36H2ygvL8ff//53xMXFITIyErfddhuysxvn3LiGpKa0PMHpz4PF7uUaN1KrGRmmbHuxkVo5M91EVEd5mqENWGZomzRariNmuU1mAVd1ZMDtK5NZwM5Tl/D1/vPYeeqSZn8nIqrHxJnaWlFarr7hWctosg/GAIuTHdeAK72dejzjW3XQvWHDBiQlJWHPnj3o1asXevXqhd27d6NHjx7YuHGj6h1YuXIl5syZg3nz5mHv3r3o3bs3Ro0ahZycHKfb79ixA5MnT8aMGTOwb98+jBs3DuPGjUNaWpq0TUlJCYYNG4ZFi1w/qR599FF88803WLVqFX7++WdcuHABt956q+r9p7rjZE4xTuQUI9igw1+6Oy+fqSkv1zrTbflXaaZbKi9nppuI6ig1M7S1UFRRLf2f/S58k5qWiWGLfsTkFbvwyOf7MXnFLgxb9CNS0zIDvWtEDVA9O0GYNBYYeL82t+VNubq4RlseeCu9nXo841t10P3000/j0Ucfxe7du/H666/j9ddfx+7duzF79mw89ZT6FvSvv/46Zs6cibvvvhtJSUlYvnw5wsPD8e677zrdfsmSJUhJScETTzyB7t2744UXXkC/fv2wdOlSaZu77roLc+fOxciRI53eRkFBAf73v//h9ddfx4gRI9C/f3+899572LFjB3bt2qX6MVDdIB5MDOnUDDFhwU63CRIbqWm8pluwZroVjwxjIzUiquNqe4Z2cXmV9H++N3pPXBJgf8JEXBLAwJtIA/KxVv7oBu5vrfrZft/Dy8SjWK6u6sSDNVOV+nTNODCPt6MDolvV6xnfqoPuI0eOYMaMGQ6X33PPPUhPT1d1W5WVl
fj9999tgmO9Xo+RI0di586dTq+zc+dOh2B61KhRLrd35vfff0dVVZXN7XTr1g1t27Z1eTsVFRUoLCy0+aK6xVNpOSAbGaZxmV3NyDBl27ORGhHVdbU9Q7ukoub9kO+N3qntJQFEjVL6OkuJ9AdjLCXTx78P9B75LraNd9fzulxdtkbb4+2omPFdh6kOups3b+50jfT+/fvRokULVbeVm5sLk8mE+HjbUoH4+HhkZWU5vU5WVpaq7V3dRkhICGJjYxXfzoIFCxATEyN9tWnj5ZOT/OLspVIcvlAIvQ64Psl16UmQ30aGWf5VXF4ezDndRFS3iTO03eQdkKjhDO1iWXl5ZS2+Nzaktc+1vSSAqNERx1q5ym57mlfdEAh275FJYy3jvLwhX6Mt3o4h1Hab6Jb1flwY4EX38pkzZ+Jvf/sb/vjjDwwZYknxb9++HYsWLcKcOXM038G64plnnrF5fIWFhQy865DUw5ZyuYEdmiIuMtTldsFiplvzkWHq5nQbg9i9vK7x1xxiovqqtmdoFwWgvNzf49BqW20vCSBqVJSMx6rnY6285m1AbL9GO2ks8Psw4NRmy/fTvnXe7bweUh10//Of/0RUVBRee+01PPPMMwCAli1bYv78+Xj44YdV3VazZs1gMBgcuoZnZ2cjIcF5iXBCQoKq7V3dRmVlJfLz822y3e5uJzQ0FKGhroM5CqxUqbTc/UGS3xqpmVWu6Wb38jqloR14U+Pjr5NGKcmJeHVCbzz2xQGby7WeoS0Igk2muzZOSNbWOLTaVNtLAogaFSXjser5WKta5WqNtrxqtAH9HlWXl+t0Ojz66KP4888/UVBQgIKCAvz555945JFHoFO6oNUqJCQE/fv3x+bNm6XLzGYzNm/ejMGDBzu9zuDBg222B4CNGze63N6Z/v37Izg42OZ2jh07hrNnz6q6HaobsgrKsfdsPgBgVA/3J19qysu1znRb/lX6GgjVKNPdkMoiA4VNh6g+cfaa93en6ivio2y+XzG1P7Y9NULTgLSsygT525e/13Q31LXPtb0kgKhRaUhjreyPV4UAVF7W8zXaaqnOdGdkZKC6uhpdunRBVFTNB/GJEycQHByM9u3bq7q9OXPmYNq0aRgwYAAGDhyIxYsXo6SkBHfffTcAYOrUqWjVqhUWLFgAAHjkkUdwzTXX4LXXXsPo0aPx+eef47fffsM777wj3WZeXh7Onj2LCxcsZ6OOHTsGwJLhTkhIQExMDGbMmIE5c+agadOmiI6OxkMPPYTBgwfjqquuUvsroQDbcNiS5e7XNhYJMe7P3teUl2u9pltdebkWjdSYnfWdpwNvHSwH3tcnJbDUnALO2Ws+NjwY+aVVDttqla01mQVsTLc9gOwaH6X560Ge5Qb8X16uZu3z4E5xft0XLcmXBOhgWwTrjyUBRI1KQx5r9dt7tXdfOgNw+7v1fo22Wqoz3dOnT8eOHTscLt+9ezemT5+uegcmTpyIV199FXPnzkWfPn2wf/9+pKamSs3Szp49i8zMmrP1Q4YMwaeffop33nkHvXv3xurVq7F27VokJydL26xbtw59+/bF6NGjAQCTJk1C3759sXz5cmmbN954A2PGjMFtt92Gq6++GgkJCVizZo3q/afA+96azfFUWg4AQQbLU17r8vLaHhlWn7KzdTkbz6ZDVF+4es07C7gBbbK1YgZ9yeYTNpeLkyK0VFxeu0F3Q177nJKciLen9HM4CZ0QY6yXJfNEdYaS8Vj1daxVZXHt3ddt7wI9xtXe/dURqjPd+/btw9ChQx0uv+qqqzBr1iyvdmLWrFkur7tlyxaHy8aPH4/x48e7vL3p06d7PAFgNBqxbNkyLFu2TM2uUh1zqbhCCohS3IwKEwXpxZFh2h7Q1YwM83/38vqUna3r2XilB9TiiR02V6NAcPead8eXbK2r9c4AsPD7o2gfF67pa9gh013l3/Lyhr72OSU5EdcnJeDaV37CuctlGN6lGd6/eyDfv4h8I
Y61+mKq620aWcm0ahM+anQZbpFXa7qLioocLi8oKIDJxKZQVLt+SM+GWQCSW0WjTdNwj9uLa7o1b6SmcmSY2L3cZBZUd1KvL9lZNdn4QGXDlR5Qf7jzjObrZImU8vSa90RttlZJkK/1eufaznTXxtrnQFf5GPQ66URwk/AQBtxEWhDHWoVGu/45udaIfz+qM91XX301FixYgM8++wwGgzVwMJmwYMECDBs2TPMdJHJH7Fqe4qGBmkjsXm7SvJGayjXdwTXnu8qrzYg0KD//VR/KItVk4zemZwUsGy4eeGcVlCvKItbnrsZUf/n6WlZ6cknsgL795EWPQb7W652L7DLd/p7T7e9xaHWlyqe00pIMqdJ4TCZRo5Y0Fsg9Bvz4ouX7rjcCx78P7D5Rnac66F60aBGuvvpqXHHFFRg+3NLGfevWrSgsLMSPP/6o+Q4SuVJQVoUdp3IBQPFBjL8bqRkUZrpDZEF2RZUJkaHKX4relEV6O07I2+spzcYv/fEkFm86HrCRPe6aDjlT18r3qXHwtsRZB8s6XiXZWmdBoidantgrqeVGakDN2uc5XxyQglPA93ForkrzMwvKcf/He7G8Fk/alVVafq9aV3cRNXo6WbIkmifhyTPVQXdSUhIOHjyIpUuX4sCBAwgLC8PUqVMxa9YsNG3KERRUezYfyUaVSUCXFpHo3CJS0XWk8nKNy/zUjgzT63UIMehRaTKrPrj0lJ21P9D2NuPiS6ZG6cH4e9szAr42XTzwVhpw1NeuxlR/qa3IANRla92t33ZHy/XOjt3La2e5WkpyIn48moMvfvsTYcEGvDv9Sp96NygpzX96zaFaOWknCAJKrWvjte5jQkRy9fEEvC/7zJN43lC9phsAWrZsiZdeegnr16/H6tWrMXfuXAbcVOvEDro3KmigJqrJdAe2vByQjw1Tty9idhZwfMu0P9D2tsu5r93RlR6M55c577wM1O7a9JTkRGx7agSaRYYovk597GpM9ZP8NW9PfM1HG23PoSvtVO1tkzatZz0X1fKabrlq61nT8moTBvnYLFHJ+vv80ios/fGk1/ehVEW1GdaPJpaXE5F/pa8L9B7UeV4F3Vu3bsWUKVMwZMgQnD9/HgDw0UcfYdu2bZruHJErJRXV+OX4RQDKS8sBf44Ms/yr5mCtpoO5+oyOmJ1tER1qc7n8QNvTumrAeTMkb68np6RJUWxYsMvry9VWcKv2QLu+djWm+kl8zTsLrpdP6YePZgySLnvgmk7Y9tQIRe+N3jZp03rWs2P38toLEsXPA0EAiiurPWztnuIqnx0Zfm+sJi+Zr63y8kA3j/NFfd53IonZBGRsBQ6ttvxrroWqofR17ju6EwAvysu//PJL3HXXXbjzzjuxd+9eVFRUALB0L3/ppZfw3Xffab6TRPZ+OpaDimoz2sWFo3tilOLr1ZWRYYBsVreXB5cpyYlIbhWDYYt+AgDER4di21MjpANhNV3O5WXS3l5PTkmToruHtscbm044/NxebQa35da/hV5nOQBXUr5PVFtSkhNx5lIpFnx/FADw9p39cEMPS5nyrj8uSdu1ahKmOCD25qRW98QozdckO67prr1pKJWy+yoorUK0UdkJQWcUV/mUVvl9iYr8d6p1dZczdaV5nDfq875ToNTBkvL0dUDqU0DhhZrLolsCNywAIuKA4mwgMl7bOeJmk+U+WXLukepM94svvojly5djxYoVCA6u+WAaOnQo9u51PMAm8gd513I1gW6QnxupqSovt2a6y32YRyu/blF5tc2BtrddzrXqji5m5ppHOs/GzxrRxW02HNC+hNWTMuvvM8hFN3ktuhpTw+bvbFmZ7DXfPTFaeh7KA6yySuXvKUqDxL5tYvH4DV0BAMEqpi0oYTILyLhYAqDmPbQ2y8vlWeACN0telBjYoWmdqeKRP1f8nen2dUlSINXnfSeSiNlmecANWL5fPQ34YAzw5QzLv4uTgfO/a3O/Z3Y43ic5pfqT89ixY
7j66qsdLo+JiUF+fr4W+0TkVnmVCT8dzQEApKhYzw3I53Rrvabb8q/SOd2ALNPtw8FlcUXNQVVppcmmRNObLue+XM+ZlOREfDhjoPT9/JuTpLJXd2vTRc/e1L3Wgtsqk1kKkCqrzVh6R1+HjJfSdbLUOKWmZWLYoh8xecUuPPL5fr/MdpeXDMuDKvlrv1RF0C0uBfFkdK9EDOpoycoW+hiYyom/s60nLZMoxPfSEzlFmt2HJ/LPA18fm0Gvw91D2yva1t9VPLbl5f47iaHFkqRAqc/7TnVJgJ8farPNhZnA7re1ue/ibG1upxFQHXQnJCTg5EnHBiDbtm1Dx44dNdkpIne2nshFSaUJiTFG9G4dq+q6wdKcbo0z3dbbU7Wm28tGanL2JZk5hTVn6pWsq3aWSR7YoSmaR4U6v5Kb67kiryroEh9l8zsSs+EJdgf94iaZBWWK7kMLZXYVB8M6N8f0oe2k7zs1i1C8TpYan9rKlpVWOg+uSyqcB+OeKGnSBgCtm4QhxprB9TUbLHL1OwOAX47n1lqGUT4TvLDc98c2a0QXxIa7znarfQ/1lvy54s+gW82SpLqmPu87kUR1tlnDY+DIeO1uq4FTHXTPnDkTjzzyCHbv3g2dTocLFy7gk08+weOPP44HHnjAH/tIZON764HYqB4J0KvMgvpvZJi4plv5dWqCbu/Ly+2bD+UUVUj/V3Iw7axM2qDXoV3TcNXXc6XSVPP4nD1WsXO4mG2bO6Y7Xrq1JwBgyaYTNicS/Mm+zL+wvAq5xZU131dUs6ScnKrNbFmpLLiWP2dty8vVNQMb0L6pdEJSLj7aKAWPCTFhiAy1tIHJL6vCzlO5Pj0eJV3TayvDKA9ItTihYNDrsND6HmavNpeoyJcZVPvx96jVkqRAqM/7TnVJgI8NApltbjfEsm5c099BwzzWUh10P/3007jjjjvwl7/8BcXFxbj66qtx77334r777sNDDz3kj30kklRWm7Ep3fLmomZUmCjIbyPDLP+qKi8P9q2RGuAk0y0LuoGaTHJIkO1L3V2Z9N6zl/HbmcsAHDuMO7uepzWs8sfn6rEa9DrpLXZA+6aY0L8NereJRUmlCQtTjzq9jtbKK233rai8GjmFNb/Pi0UVtdrcieqP2syWlbjIdHtbXg4A727LQJVJQK9W0fhs5iAYrf0m/ntXf6ncev3BC/jrW9sBWJoMTl6x26fSeSVd02srw1ipcdANWN57OzWPcLi8NpeoyJ8HWvcxkdNySVJtq8/7TnVYbXcQD2S2WW8AUhZpfKMNczmH6u7lOp0O//jHP/DEE0/g5MmTKC4uRlJSEiIjI/2xf0Q2dv5xCYXl1WgWGYIB7dWX5okNgLQ+ABG8aKRmtAbC5T4EcvZB909Hc9A8MhQDZbNmU5IT0aXFCRy+YFkj+faUfrghKcFplkUQBLz4bToAYHz/1njw2s647rUtCDXo8P49g2xuF1DW8VVePu+ulL7UmrULDzFAr9fh+bE9MG7ZdqzZex53DmqH/u2aqPrdqGVfkltUXoWLdtmN7IIKtI1zXgVAjVdtZsvkgZS8fNgm062ivLygtAof7jwDwFIWPbhTM3RqHonDFwpx1/92SycUV2zNcLiuWDrvTRBZlzKMVdU1nweFZb6NDJMLD7E9xHr6xm6YObxjrVXMyJ8flX4sLxeXMmUVlNe7iQ/1ed+pjnLVQTxlEZA01j/3KWabCzNRKwGrYHcfSWOBCR8CX9zl//uux7xuQRoSEoKkpCR069YNmzZtwpEjR7TcLyKnxK7l4pgctcSRYVV+Ghmmptxdi0y3vJEaAHy177zT5k35pTUHX52bR7r83X17MBN7z+YjLNiAx0ddAWOI5S1CgA6DO8U5BNxK1rDKs8PuMsViMGG0/l76tInF+P6tAQDz1x32e5mpfXl5UXm1Q+XAF7+d4/xWcuBNtszbLufyoNumvLzSu+7lH+w8jeKKanRLiMLI7pZsiVhqXljuPgD1pXS+L
mUYtS4vF9m/p7h77/UH20y3/4Jub5cy1QXe7jtnepNLTjuIZ1ouT1/nn/v0S7ZZJU1PKNS99wotqA66J0yYgKVLlwIAysrKcOWVV2LChAno1asXvvzyS813kEhkMgvYmF4zKswbQX7KdHvXvdz3RmoH/8x3erl94Cs/kLxc6vygsrzKhIXW+b/3X9MJ8dFGqTKg0mSWsvmAujWs8sdX7uIEg8ksSM2M5NmhJ1O6ISo0CIfOF2DVb+ecXlcNdwdK9tnBwvIqXLQG3eIB19KfTvqlIzXVX4IgYP+5y263kTfOMpkFLNl0Av1f2GjT5fzKf2/EC98c9ngAX+KijLyoXH15eUlFNd7dbslgP3hdZ+j1Onx38AIOnCtQdH3A+9J5T40eAaBpRDCyCsv9HtT4o7wcqKliEk/2+jPb7ExtlZcDNUuZguyC0/ow8UHc92ijbWWCq32vjSkFVJ+5OTJKfdp/peZittno36rA2tEwT2KpDrp/+eUXDB8+HADw1VdfwWw2Iz8/H2+++SZefPFFzXeQSPTr6TzkFlciJiwYgzvFeXUbwX5b0+3FnG4fG6mZzAJ+Pn7R6c/kgW+Z3Six/NJKp9d5d3sGzueXISHaiL9dbZlEIJ/HK5/zqmYNq215ufPHKg94w0MM0v+bR4XikZFdAAAvbziGAhcnDJTwdKBkn5U6falEaj5kf8DP+a0EWAK0+z76HYtSj0mX2b8FyLNlG9Ky0Pv5H/DGpuPItwvu8kqq8L/tpz0ewLsaGWYTjCssL/9091nkl1ahQ7MIjO6ZiNS0TDz46T6vDnfUloG7yzCK8kqq8OhKZUGNL5lHrbuXi8STjFHWYM6fHcSdkVc81EbAn5KciMhQy/u3Tgd8MmMgXh3fGxXV5jqfDU5JTsRM6+eeQQd8NvMqp9MqONObvCcAhectncb9JWkskLKg5vtp3wLjP0BDzRzXN6qD7oKCAjRtalnbkpqaittuuw3h4eEYPXo0Tpw4ofkOEonE0vKR3eNtgkE1DFJ5uX+6lxtqcU73now8txktMfD9+XiOzeX2B/uApUnYWz+dAgA8mXIFwqyBb4hN0F2zn2rWY9oE3S4y3eLaQ52u5mSEaNqQ9ujcIhJ5JZV4Y9NxRfdrT8mBkn3Q/d+fT7m8Pc5vpfQLhRi7dBt+SM9GiEGPf/81GW/f6Tj+rklEMO4Z2h7fHszEg5/udZg44Iy7A/hSF2Xk8pFh5Qoy3eVVJryz9Q8AwAPXdgJgeT57y5sycFcZRmfc/U58zTz6u7w82tqQstKHqiZvlDrpXu7PsujKajPyrWviBQF4bNVB3Pl/u+tNNlj8fDIJwCC7/iUAZ3qTRvzdaVwvez/tMBzoMQ4IifLvfWquYZ4kUB25tGnTBjt37kRJSQlSU1Nxww03AAAuX74Mo5HdHck/zGZBCrq96VouEoN1zed0SyPD1Kzptma6vVzTrTTwPXOp1OZ7Z5nuNzYdR3FFNXq1jsG4Pq2ky+VjhOQHpmrWY1bIgllXJxjEzuFhwQaH32GwQS9lxD7adQbHsooU3bdI6YGSfVO6imr3z5Hamt/KtYN1zxe/ncNf39qOM5dK0So2DKsfGIw7B7XDjT0t4+9eHd9L2lbMYH97UHmw4e4AXh5cu+xeXuU5sF/12zlcLKpAq9gw/LVvK0XdxF2JiwjxutFUSnIiJg9qCwAY3jnOZQDu6neiReZRXsWjZdBdYZfprv3y8prngcks4LuD/i2Lvlhs2wMjy27cY2ZBOe7/eC+WbDou/Q3r0vubvHLE2d+KM73JhpoZsXL+7jTubL/qUj8Ff3dyr8NUdy+fPXs27rzzTkRGRqJdu3a49tprAVjKznv2dD6XkshX+//MR1ZhOSJCDBjWpZnXtyM1UqsDI8OM1ky3t93LlQa+9uPC8u1KtI9mFeLzPWcBAM+NTrJpBmfQ66DTWbIW8oMQNR1f98nWu7oqLxeDBHlpudzwLs0xqkc8NhzOxvx1h/HpzEGKT3AoP
VA6qjKYF/mzu7KS7vBUe8qrTJj39WGstPYXuO6K5nhjYh/EhodI2xj0Ohj0XvcolcgP4MXlNGazYBMYlHnZSK3KZMbyny1Z7vuu6Yhgg96n53FpZTVO5hTjigTvsilZ1ud3m6bh2HryksvtxN/J+9sz0CwqFM0iQjF/nesTajpYgvQR3eLx+5nLyCkqR4soo8MUBvl720XrGnL7bdQymQXpdqNCLZnuqgBmugHg75/udfhd+dKB3t5Fu8aTrryx6QQ+23MOt/RJxLoDmXXm/U3+eqqoMktNPUV1qeM+1QE2Hbzl/9fB+ZpknaXDeLsh/t0vp+pQ0L042b+d3Osw1UH3gw8+iEGDBuHs2bO4/vrrobceXHTs2JFruslvNliz3CO6xzt8EKpRl0aG+ZrpHtihKYINOpssjZwY+CZG2wbn8vJyQRDw7/VHYBYsFQT22SqdTodggx6V1WaUV1nW5YkHrv8cnYS/f7rX6f0CNR1fbeZ0uzjotO9c7sxzo5Ow5dhF7PzjEr5Py8JNPZUdlCk9ALpU7Hytuyf+6q4sZvD8eZBMyp29VIoHPvkdhy8UQqcDHru+Kx68trPDxAKTWcDLGs6Wlz9/7Zv92ZaXKw+6v9p3Hufzy9A8KhQTBrQB4N3zOCHaiPBQA/64WIJp7+7Blw8OQavYMNW3cyG/DAAULxt6Yb2yaSlikH7Vgs3IK6l5fcsDO0EQbMq+iypMmLxil8/Bn/wEY3SYuKa7drO49kG3p5MT17sYJalUTqHyYDOrsBz//UXbMXS+kr9uLH+/YJuf16WO+1TX2Qfe1tdVykJLp/HaZtaugsdnYif3CR82usBbddANAP3790f//v1tLhs9erQmO0RkTxAEfJ/mW9dyUZDYSK0ujAzzsZGaQa9D88hQXHCSxZUHvvZzZ+Xl5VuOX8TWE7kINujw9I3dnN5PiDXovu2tHTblg00jgnHv8A5Yu++CzeUJ7uZ0uzjBIK5BdZXpBixZsPuu6YQ3N5/Av9cfwXVXtJDWnrvjbUWAJ/6c3+qpJF6rg2RSZmN6NuZ8sR9F5dVoGhGCNyf1dVlx40uZtjPy5688mw3YBgnFdo3UBEFwWg1iMgt4e4ulX8HM4R2kE12eqlcAy/Pukb90QYfmEVLGuKi8CuOX78SJnGJM/d9urL5/CJpEhLi4BUcms4CMiyWW2/e2XNMDecAN2AZ21S7KmX0N/uSTGqKM1jXdtd1ITcEyA8B5VYU37EcseiOQ72+2Qbfj34ozvcmGzfuV7P8TPnQxp3uh/4NMswm4eMz2ewCoLHW+vbdMVcCh1ZZS+XZDVJ5IsL7KU58Guo0OzEmIAFF0lLlw4UKUlZUpusHdu3dj/fr1Pu0UkVx6ZiHO5pUiNEiPa69o7tNtBVkrM6pMgs0ILF95NzLMt0ZqQM1BapzdQa581Mlla5Atrs8Wy8urTGb825oxmj6kPdrFRTi9D/H3ZL9eL6+kCiu2ZmBY55qDtJt7t3To+KpkTreYkQkLcX8e8IFrOqFVbBjO55fhbTeNzuQ8jSYSxzk1i1QeKIh8nT3raj0j1w7WDdUmMxalHsXMD39DUXk1+rWNxfqHh7ld4qJlaan9yKzSCtvXj9ilvNpktgnyBMH1+8p3hzKRkVuC2PBg3DmonXS5vJu4q2f0sjv6Yvb1XXFLn1YY3CkOBr0OseEh+OCegUiMMeLUxRLc88GviueEp6ZlYujCH5FrDYrf33G6VpYeiu/889cdxgvfOm8e52tjLLGJWrBBB6O1qimQjdSU8OW5azIL2HvW/eg8pQL1/lbmof+Ip477AuruPHLyA1fHkEljgdlpNd93uQGYfcj/AXf6Okvp9i8v11y2OBnY8jI0H8FVWQR8OQP4YIzlPlTPH6+FTu51kKKgOz09HW3btsWDDz6I77//Hhcv1owpqq6uxsGDB/HWW29hyJAhmDhxIqKi6luXPKrLxAZq13RtjohQr4ozJPLGYFo2bPFtZJj3B2Ji5uuTe
wdhZPcWAIBb+9oGvmI5edum4ZbvrUH353vO4mROMZqEB2PWiC5Ob99kt4bUma/215zNjQ0LdjjgqFQwp1sMHsKC3b8lhYUY8I/R3QEAy38+hXN5ns/eujtQklcEqPk7JGowe9Zdx2WuHQy8i0UVuOt/e6Ss8N1D2+Pzvw1GYoz78mktS0vtR2ZtTLfteltmff2XOAmunAW+ZrOAZT+dBADcPaSDw/up2E3cvgN7YowRy6f0w029Wjrdz5axYfjgnoGINgZh39l8zPp0r8exjOLyCftmW7XVR0sAkFVYgexC19lZX4I/Meg2BhmksvlAjgxTwtvnrvhetmbvea+u70ptv7/ZBt3Of3fuOu4b9ECr2HCP91OXmseRn8izt1EJ/s/mpq+zlGzLs+uApZT7l4X+vW+xXFx14A3/d3KvYxRFMB9++CEOHDiApUuX4o477kBhYSEMBgNCQ0NRWmo56O3bty/uvfdeTJ8+nV3MSVNiafmNPX0rLQeAINmawWqzgCCN3gfN1g9NNWe4a9Z0e9/JUVzHGR0WjL5tm2DTkRzo9Xqb/RDLydvHReDUxRLkl1aioKwKb2yyjPh79PquiAkLdrxxWDKuao4H7DuAA1A0p7umvNzzW9KNyQkY3DEOO/+4hBfXp+O/dw3weB3xQOmhz/bZrKuUl8J/tuecx9sBgKs6NsUn914Fg14Hk1nAnow8lw2aXPG0Xnv2SOcnQexx7aB//Ho6D3//ZC9yiioQEWLAott7YYyLgNPe5RL1JbZ6nedgM6ugHP/+znYtsxgkiK+7YIMOOp0OldVmlFaZ0MTuNjYfzcHRrCJEhgZh+pD2Tu8nJTkR1yclqH5ed42PwrvTr8Sd/7cbm4/m4NmvDmHRbb1clri7Wj5RF3kT/IknGEODDdLSldoOuu2XI7jiS1m0q/cyLdT2+5vNXHM3J2FTkhNx+lIJFn5/DFe2a4I5N3TF+9tPY0N6Nh7+fB++fWiYywQBm2M2IK4qG80m2wyuv7t1m02Wcna3M1r8SVYurpa/O7nXMYrThr1798aKFSvw3//+FwcPHsSZM2dQVlaGZs2aoU+fPmjWzPuO0kSunMwpwsmcYgQbdBjRzfcXZ5DswLHK5Nid1FviAbOaNYk13cu9OxCrqDZJAWREaBASrdkpsSkRYDm4PZVTDAAIsWb5c4sr8I+vDiGvpBKdmkdg8sC2Lu9D7cGmsxnEtkG3+zndStZo63Q6zB/bAze9uRUbDmdj64mLGN7F87KDlOREtIo9itOXStE8KhRvTuorBRNi8Oz2fmH5aGkVGw6DXqf44Mk+MO/fronH9dqf7TmLhGgjsgu5drA2CYKA/9uagYWpR2EyC+jSIhJvT+mPzi0iFV3fZBYUN/kCaoLtZpEhyCmynBwz6Cxzgh32zcn1xfJhMeiOCA2yTBqoNktZcPljW2rNct81uB1iwp2faAMsJw+9Wds7oH1TLL2jH+776Dd88dufaBFlxOOjrnDYTs269xlDO6BlrFHV7xWwlObnlWjTPMib4E+cSmEM1iPEEJjyciWZbvvGl2r46+RJoN7fPJWXy1VaR0p2jo/E4E7N0D0xGgeXbEVGbgnmrzuMV8b3drgOm2M2EouTbTPOh78CuqZ4V14uBvDF2a7XT5/Z4ZjhrnXWcnHFAtnJPXBU1+rq9Xr06dMHffr08cPuENkSS8uHdm7mMhurhjzoDnh5uY+Zbvm83ogQg1T2Kh7M2geF3x+2lPFUmgRpZnBKcoLbjsFqDzadZVaUzOmuKS9XdhLkioQo3HVVO7y/4zSe/yYd3z8yXFHn46Jyy/7pdbAJKvZk5HksoxefLfvPXsaSTSeweNNxlwdPy+7oiyYRodiYnoW1+y/YNHKKMhpQVO76vsSy10dHdsXiTccdfu7LQTK5VlhehSdXHUTqYct7zi19WuKlv/ZUtaRFaTCZkpyAaYPb4/UfjuHXM5elgBtwHnDbC9LrUG0WpKBKPNkVERIEsyCgoKzKY
T3vtpO5OHAuH8ZgPWYM66D4Mal1fVI8XvprTzy95hCW/nQSzaNCMc0uq67mZN7NfVqiZ6sY/N+2DLdN3kKDdKiwBkJNwoKx65mRuOaVn9w2vYqPDoXJLOCii8kFvgR/Unl5cE15eWUtdi83mQXp/U703JjuWL7lFHJlj9e+8aUaWjcNBAL7/uapkZqcWLUl9maJDQ/BGxP7YPKKXVj1+5+4umtz3Ny7pjqGzTEbEfsAuKrUu27d6etcNGSzG7dV70q0A9zJPYB8HyZK5EdSaXmy76XlgG35t5bjW8R+Gt40UvM2+1FoXasdbNDh19OXpUZg5/JKsWTTcTzw8V6PB0Rv/XQKqWmZLn8+sENTVScSiiscg0l5x17P5eXK34AfHdkVTSNCcDKnGB/sOK3oOuJBqH1AoiYIOJVbgjecBNyA5eBJADDrs32YvGIX3t1+2qFzsruAW659s3C8PaUfYu1ONiVosJ6cbB3NKsQtS7cj9XAWgg06vHBLDyye2Ed1Dwmlz6MOcREY3CkOTb1o3gcAUdb1pOKJoiLrhAKz7ESifZZz6Y+WLPfkgW3RLDLUq/tVatLAtphzfVcAwPxvDmP9Qdv3GDUn8+KjQxU1eRMDbgC4XFaFzUeyPfZymD+2Bx64trPbbbwN/sSgzRisr/Xy8tS0TAxd9KND4Gg2CVg8sY/0/d+u7uDQ+FINpc/3pMQoqRJLlBhjxH1Xd3C4XP7+Vttrn+UnXj19LldIywdqDqOv6hiHWddZnk/Prjlk03OEzTEbOEHBazv1aeWl5u7WaNuvn65vJdrRLRvluDCAQTfVYWcvleLwhULodcDI7tq8qVjmTms/Nsy3kWHO98PdAUdqWiZuX25ZM1RlEjB5xS6MWvwLAMta9Tc2nVBc8ueuO69Br0Pbpsrn7jpd0y2f0+2qkZrUvVx50B0THownraWrSzadwEUP42rKq0zSCYDSSpNN93qt1w5qcWzYIsqIlORE/H1ETVAw5aq2Ph0kk6Mvf/8T45ZtR0ZuCVrFhmHV/UNw1+D2Xo2vUvo8EhuVNQm3DboVjqmWTsSUVpqQmpaJh1fuAwBkFpZLB/a/nKhpePrr6TzszshDsEGHv13dUdmd+OihEZ1x11XtIAjAoyv3Y8epXOlnniYKyDW3niBw1eTNlQc/2QsAeHtKP6l7uEge2A3qaMli2791+3pyS6zwCZU1UquN8nKpQZ2TAO+l749i64mav0ObJuE+ZVWVPt+vSIjCtqdGICkxGoDlubHtqRF45qYk/PjYtdJ2I7q1kN7f3DWa9BfHOd2uldtlukWP/KUL+rWNRVFFNR75fJ/UUJDNMRu4Ik/ZZhXdupWs0ZYH8O2GWAJZt++o/g73dEB0K2WbeurkbjYBpXm23zcQDLqpzko9bPlwHdQhDnEaZmbEsWHVGma6vSkvF2eGF5ZVOg2qXR1wiAdVuXYlkd4EekrOrreI9jHorta+vFw0fkAb9GwVg6KKarycetTttoXlNes7TWbBJgM/sENTm872gSSOMBNLWuVz1WPDQlh6qJHyKhOeWXMIj606gPIqM67p2hzfPjQMfdrEen2bSoNJ8W8baxd0926t7L4vWAOqymozHvh4rzSRQG6ZrIpFzHLf3r+1x+7rWhF7L6T0SEClyYz7Pvwd6RcKASibKCCSN79MSU7EtqdG4LZ+loM78cSlMwJqynWvbF9THp6UEIVXx/fG9UmW6imx4ikxxihNgLitXyufT26JjdQsa7p11vvyb9CtZI3157/WNIz0ZXIGoPz53iU+Cga9Dm2sJ3Djo43S+1i5LLscHmKQ+mU4q9QSl+/4I/AWBNtJHa5OENv/3P6ETpBBjyWT+iIqNAh7z+bjzc2WhqVKT1CwOWY9VVWibDslpeAe12jbBfB6g6XkHIDLwNuvhw2ycnEl3JWUi2PPLvxec5lXI8nqJgbdVGdp2bVcLsgPB0Bq53SnpmVi+ru/AgCKKkxOg2pnBxz3f7wXT685pHnjG
ndn18UmQP3axjr8LDHGiCvb1/RI9txIreagRp7JP3fJUoanprwcsBy8zx/bAwCw6vc/sf9cvstt7dc3yrMaBr0O8XXgYMdZSesl2cmV/DLna09JnXN5pRi/fCc+23MWOp1lqcJ7069Ekwjvyr1FSsqgAUj9KZpG2C4dGNRRWfMy+WvK3XvB89+kY//ZfPx8/CIMeh0euMZ5KbW/GPQ6LJ7UBwM7NEVRRTWmvbdHKrlNSU7Ey7f3crhOfLTtCVb7KhyDXoeerWIAeA4axROKZ2VlvulZRbjz/3ZL77di9jk0yIDOLSzjTmPDfT+5FYiRYUrWWBeU1Zyg8TXo9jS3WhRttDzPo6z/yk+AyvuAFFdUe1z7DFie15XVZk1Lz+3HWXpe013zvLHXpmk4/n1rTwDA0p9OYvcflzyeoLA/2Ur1TLDnUXEAlJWCK12jLd8uaaylZDss1vm2SsrfvaVVubiakvp6yuug++TJk9iwYQPKyiydkgVXQ+KJvJBZUIZ9Z/MBAKN6aBt0iwdA1X5opGZQEHSLQfXFYttyaE9BtXiZs6yWr9ydXRczwNF2a4u7tIjAtqdGSAdSgCXTbf9e4GxOt30mf+tJS8njH7kKzxbL9G/XBLdaM1/zvk6zWdcqZx9026/rDrFmLOJ8DLx84ayk9ZJsTXhBmbIRQOTaj0ezMeY/23DofAGahAfj/bsH4pGRXVQtDXFHSRm0uFZcnukONujQr63lBFbTcG2eg5kF5XjqywMAgLG9W6JtnMIDQw0Zgw1YMXUAuiVE4WJRBaa+uweXrO997eIiAFi6ty+Z1AePjuwK+9MVzkqKz8iCaE82pmfhzCXH7cWs6Q7re09IkF5aK19c7vvrTN5ILcTDUiKtqC1N1qLcPSU5EbNHdnW7TaT1+S4G34Wy9zH5+3BRebXitc9XLdisaem5fSPNSk/l5dLyAeeH0WN7t8Tt/VvDLACzV+5HUXmVyxNybI7ZAER5Ok61ll8r6datdI22/XbdRgPBtVPJJBn1Uk25uNIycGfbqS2pr6dUB92XLl3CyJEj0bVrV9x0003IzLS8yc2YMQOPPfaY5jtIjdMGa5a7f7smiI/WNgMpdjDXNtNteVPwFHMrOYvvj6DaFSVn18WTFMWyzt8AYLDOA5cfoJoF9xmDimqTy0w+AHy+55xXB05Pp3RDZGgQDvxZgNV7/3S6TWGZ7e+11K7TutjM7f+mDcBnM6/Ckkl98M/R3VXvi7eeHNXVaUnrJdnJGXmpOaljMgt4dcMx3PP+bygoq0KfNrH49uHhuKar53Fzaoll0J/NvApvyBpXicQgRL6mu0l4CLrGW0aT5Wn4dz6WbRkZKGaHAyEmLBgf3DMQrWLDkJFbgns++A1F5VXYmG55n28VG4ZgvR6LNx1HVqH7kmKTWcDafcpH06zd77xMU3y//XDXGQCW9znx7+KsYkctcRRkaLC+1jLdakuTKzXaH/FxDe0UhyWT+mDCgNY2P5eC7jDLvzaZbtnvuri8GpuszwlP7BtU+lp6bh90K810uxs7+vzYHujQLMJy8mv1QcSEBePuoe0dKmrYHLMB0MnDKRcHgkq7dXtco+0igA/E6LDI+JrHpGS9OgCc2e7kMpUl9fWU6qD70UcfRVBQEM6ePYvw8Jqz5hMnTkRqaqqmO0eNlzi2R6uu5XJi0K3tmm7Lv57Ky/0xYsVbSs+uB1vP5IsHok2tBwxihkJ+ACXfTiQvKS+vMmO+hzWH7hq7udIi2oiH/2IpnX059ajDPgGeM93iQXJkaBAGd4rDLX1aYfrQDoobPvlqdK+WTv8OeTaZbsvjqu2uvvVdbnEFpr67W5pTPW1wO3xx32C0ivVfVkCcdf3Xvq1slk2EBRukv7O8vLxpRAhaNwl3u07ZFy98m+7XJlSexEcb8cE9A9EkPBgHzuVjwIubsGJrBgDgwJ8FmPWZ4/xiwLakWJx5f1nhiUnLrG7XJzAE1Ly+gg06KTh09v6hlrORYVpOz
HBGyRrrMNkaZE/rlpUSK5X+2q81bunTCl3jo2x+Hmm0z3RXSe9fv52+LG2XU1SO/20/7dU+2D9P1LLv9q98ZJjr12tEaBDenNQXBj2wIT0bk1fsliZaiG/1fdvEsjlmQxNt97cMCgOufQYwVQIZW5Vla/tNh/Osr5txW8e+82JnfSTPtiu9/yInJ9a8Kamvh1R/uv/www9YtGgRWre2PZPZpUsXnDlzRrMdo8brUnGF1NhL69JyoKYxj5bl5WJJtd7DK6oudSZVenZdXNMtBq1idk7MFNsH2fbN1OwPXpx11ZXzdmzK9CEd0LF5BHKLK7Fk0wmHnxeV22e6bT/4xIMueeZC6ZpFLdifFBDJ13QXlFUFpKtvffb7mTyMeXMbtp+8hPAQA5ZM6oPnb0mWSn5rQ6Rs9JgYgAC25eVNIyzriDs2i/DbfngbkGilc4tIzBxu6Z7uMM7KzW7JGz6qeQ/9ax+F3XRhyXRL5eVaZLrFRltBBikw83f3ciXvVx2bR0r/rzT5XqpZUFqFQ3/mAwCGdW4GwHEKRU2m2xJ0/5CeLb1//fu7I9J2vlZ5+TJ2yz7o9jgyTFrT7f595Hx+KZwVFIjP95LKapaUNzSz02y/1wHY8hLw5QzggzGuG4OZTcCWRcArnSzbO6PTAUMeclw/nb4O2PWWJruviphtN5uAgyuVXcdZ+by3JfX1jOqjjpKSEpsMtygvLw+hof6d/UmNww/p2TALQHKraLRpqv0aRLGRWrWGpX7SyDAPmW5fO5PqAMSGB3vczt6Adk3wyYxB+OTeQVgyqQ8+m3mV4rPr4ppuMWgVM90l1pnc9geo9t97c6DpzcmJkCA95o6xHHB+sOM0TmQX2fzcXSM1eeda+3JBcY2ufdMrV7ytzigsq3LIYJdWVqNI9vu8WFRR61196ytBEPC/bRmY+N9dyCosR6fmEfj670Nxi4pATCvyoDtK9v9oWT8EsyDgu4OZyLikvq+BEnVhDrDJLOCjXd6fnM8pKlf8HpqUGIWRScpfiyFBeumEiLZrumuvvByoeb9yFsdNH9LOJkjU4iTAzj9yYRYsJ1TEPgb2UyjE5//JnCK396vV6aCconLV1UCO5eXOm36Kt1XTnd51ubC4nMydjNwSVirVWy6O9+wz0FVltt87awyWvq4m2C67DJcEM7DjP7bXldZDB0Dal5bs/eltQOklZddpM8jxMm9L6uuZIM+b2Bo+fDg+/PBDvPDCCwAsY0HMZjNefvllXHfddZrvIDU+UtdyP5VbBev910jNU9Atlv9lFZS7LByKCQ9GQWmVw8/FW154a08s/ekk0s4X2vxcr3OdLYoJC8bQLs08Pg5npDXdFbaZ7rIqE8xmQQpmw0MMKK00ecx0K+HtyYlrr2iBkd3jselINp7/Jh0fzRgozVu2LxmVd82V76P9CBjAciA7ols8es7f4PbxJMYYMeWqdtJzWI2fjuXgsVUHbALqFlG2JzLts/MiAZbnhzgiqbFnToorqvHU6oNYf8hyEmJMr0QsvK2XTfBbm+TZbbGJWmpaJuavqzkg3/VHHnb94f+AOJDVNr4ur2kRZek/kRBtdFj77YzZLCAuIsSmGaGcDpbsa0FZFUIMekSFWk6CaJHpFoM2S3m55fWo1Rpqd0xmAUF6vfRZsPDWnlj56znsO5ePpMQYmzndWgTd4u2JWW7AMeiOCA2CySzgi9+c99vQ2uncUgxb9KPNcy0xxoh5Nye5PNHsak13alomnv8m3eG2xM98d5luJc/3KpNlycTgTsomF1BDIGsM1m00cHQ98MVd6m5CvK7eEJi13KI1My3/uuqa7ozOyWtGHHv2xVRY3pntD2YFoN807/axDlGd6X755Zfxzjvv4MYbb0RlZSWefPJJJCcn45dffsGiRYs83wCRGwWlVVI32RQ/rOcGAjsyzN1IIXlQ7SxTIZaDD+/SHMetzZFevq2XlLleMqmvy/vdfDTH6yyoGHSLj1HeBCavtFI6Sy9mO
eTBbLXJ7HAWv3lUqNs1h76OTZk7JgkhQXpsO5mLDYdr1v+4W9MtP1Fw4Fy+08xDSJAeV3kY6TTv5iRc1THOYeyREiu2ZjgcoOUUWZqoKVnnWxcymXXB8ewijF26DesPZSLYoMP8m5Pwn8l9AxZwA3bl5aFBUjNBJYGj1gI5B9jbgF/e8NEyJtDzko/0zCLc+b/dNnOg7W8TAG6yjqQMNmid6ZbN6Q6qnUy3uPTk3g9/ky5bsvkEBOsBbKXJbDMBQYtu6ttOOgbdRrvy8ihjEPZk5LldX69U04gQt2O3YsODsXjTcdXVQGWVjhVa7sZ3Zhda3pvdZbqVPt/r0rIzqkWF5y1ZYtVZarumYnVhnXNZvvJtXY0vE8ee2a+JF215qd7P7FYddCcnJ+P48eMYNmwYbrnlFpSUlODWW2/Fvn370KlTJ3/sIzUim45ko9osoGt8JDrJ1p5pSVrTrWUjNam83PO2rkYKJcQYseyOvogJC0FhebVN1vqRv3SRysG3nshFZbUZbZuGY/wAS+OagR2a4iXZ2jhnvF3Pab/2NSYsWOrSLq7P1umA5pGWQHPf2XxZeXTNAa+Y7fn7dZb3CVe/qn+O7u5TprZtXDj+Zl03+uL6dOmg2z7TLZaXp6Zl4sYlW6XLp/xvj8s10mJ36Qgn88Tbx4UjJTkRBr3OOvpIO2oOjhvzAdzafedxy9Lt+ONiCRJjjFh532BMH9pBqnYIFHnQHRFqcDnBwN/iIkICOgfYm4DfWcPHlORENI9UNlatxPo6D7J7TxFPYopNv4KDZN3LK6tdjh5UylkjNS0yy67Kpt0FiPvPFQCwBP3yKQ6+7s+5vFKcuVSKIL0OV8kytWF2fTFCg/Q+vy+JJ15evCVZ+t7+5wIsn+uuGvIJAOavO+z0c9A+011eZfI4aQSoOYnvjNLneyBPhFGA/fau91lqMdiub+uc3c0MTxrruCZerp7P7Pbq1H9MTAz+8Y9/aL0vRFLXcn928gwWu5ebtR8ZpnTWb0pyIq5PSsCQhZuRXViBuWO6o7CsGs9+lYb8MsdmMs2iQqUDzo3pljfa65PipWBCSRmbmAVVW8YWbHdQYQzWIyIkCMUV1cgqsKxVkh9U/efHk9K28bLy6GhjMC6VVGJg+zi8PaWfQ8me6IX1R6DX63x6Djx4XSd8ufdP/Hm5DP/9+Q88MrKLzWxYwJLpFg9U7Q+sxKyIfaM5Mavx136tsOFwFi4WVWLKVW3x8a6zNidJxN+xfZGUs6IprTXGA7iKahNe+DYdH+86CwAY3qUZFk/sg7jIutFnRF5eXl5lDtgEg1v6OO+QX1s8La8BHJfJJLgoC27ZJBwXrU0GB7VvghMXS9xmUqvNAkZ2b45NRy6ib5tYrH5gCAx6Hd755RQAy/uc2EhNECwVO1FG9f0zRFLQHWSQZbp9e/W7KnX+5+jueGH9EY8BYmFZlU2Ju6/l7mKWu2/bWJsTS/KgOzI0CDqdzuf3JQGQngdv6x0/P2LCg1FlMntcGpBVWIGlP57EIyO72FxeVmn7u8gsKFP0Oj2eVYQeLZ2P41PyfNdZt6MGRE1AeOpH7+9HDLbbDQGCI4Aq//QD0Z4v74PWxXTy8vp6xKv2reXl5dizZw++/fZbrFu3zuaLyFslFdX45fhFAECKH7qWi2rKy2t/ZJicQa9DbJglW/P6xhNYvPmE04AbAL7eZ1kLV20y48ejlqB7ZPeas5v+LGMTszSi0CCD1J32sVUHAVgCiYzcUofrZlvLo/W6mo62FdUmpCQn4tkbuzm9Py2agoWHBOHZmywztt/achJ/Xi51aARXXFHlMZNhXx0glnlXVYurqIEhnSxllZdlB/xi1qRpRDA+m3kV/jmmu83tyrlJlKiiZOZ6Q/Tn5VJMWL5TCrgf/ksXvH/3wDoTcAO2zdMC6XoVjcX8wdPyGh2ApZP7WpfLuG/4GBtWExC3i4tQVLq86Yjl80Wng3TyQcz2hhj0CA3SSxlxX9d1i+XlN
nO6fcgsu8tkP/jpPkUBYnqmbR8QX0eGbbOu5x7a2bZfiLx7uRiMi2vxPfn7dZ1gdLKc5qmUbtLzICU5ET8+dq30sw5x4SgorZKae3ryxqbjDp8vpZX2DUGV3Za78XLunu8ivR6NvgdHg/PFVOXbVhZ53sYZnb6mcdnR9fUo4Ib7TDegYBZ3/Z3ZrfpIIDU1FVOnTkVubq7Dz3Q6HUwajKCgxumnYzmoqDajXVw4uidGeb6Cl4KlkWHaZbqlkWEqPzvLq513ALf325l8fHcwE3GRIbhcWoXY8GBc2b6J9HN/lrHZB93GYL0lFYSaudGeCAIQYo0uK6ot67xfdFEOr1VTsDG9EvHxrjPYnZGHf69Pl0rhI0ODkFdSiYyLJW4PVOVrpMXMtZjpLq82SZmsROsygaKKalRUmxAaZJBK18NCLDO/u8ZH4oVvnT9eLc79KJ253tBsOZaD2Sv3I9/6mnhjYh9cd0WLQO+WA3mmu2mEsrJordWVEzLi8hr7TKWrjLYrsWE1v9PLZerWCl8urdm+0voCDAnSQ6fTIdIYhPzSKsu6bucJTEXKZY3UxEx3hYfMsjiHXOzSLq5hF7tge8pke5JjXYcs8rQ/7pjNArafshwHDrdr0mmf6QYgrcW//+O9bm+3e2I0OjSLwJGsIvxteAdsPJKDjNwSXJFgu9xM3jvkfH6Z6tyZ/eeL+H4eGqRHRbVZ8Wd5YkyY25+7er6LTGZL2b/95yzVZyqfjWFNrGuiVVxPMAOrpgPC+8APz6i7v0ATPDzOBjyzW3XQ/dBDD2H8+PGYO3cu4uPr2ToCqtPEjs8pyQl+XYMpfshquabbpLK8HLAcYGXml3ne0OqfX6dhXN+WAIARV7SQ1qYDyrqiJ3h50B1idzAQbNDjssp5qgIgzSqtqDZjT0ae1IjG1fbelsOLdDod5o/tgZuWbMX3aTVvzmfzLBn5UxeVnRmWVweIme6KKrOUJWoRbZQOjPNLqxAfbXAYJyMPuvxBbcBS35nMApZsPoH//HgCggD0ah2Dt+7sh9ZNtB8xqIUIWaa7S3ykx5JTV1pEheJiUYWq69XFEzLi8hpnAaYSqWmZ2HgkR/p+Y3qOm60d5RbXBN1iczMx6IkMtQTdRRpluuXdy6tMZgiC4PTzzVXp+LybkxATFqLJkgT73hC+rOk+fKEQ+aVViAwNQu/WsTY/kzcXiwit+X9KciKahocgr9T1SZKi8mrpPXrClW1xNLsYGbklyCux/cyRn/Ct9OKz3P7zRaxOig0PRnZhBYxBBkWvU0/NNYGa5/vz6w7jw11n0KZJGM5drvnsL68yMehuzAbeB/y80LvrfvcYUOqYBA0MhQvozmwHKkss5fHthjiWiDfgmd2qX+XZ2dmYM2cOA27SVHmVCT8dtRw4+WtUmCjIHyPDrMcuasrL92TkqTpYuFRSiW8PWkriRibZvv6UlLF5e9Btv6b7/OUy73531pupqDLVWlfXM5dKXH4EHMtWVtYlrw4ItR5MllRWS+shw4INUqnrl7//aWkgZz1gFzM+oUEGqZeA1h4d2UXxzPWGIK+kEtPf24M3N1sC7ilXtcWq+wfX2YAbcJzN7em16spzo7urDtTFhmF17flh0OswuFMcbunTCoM7xakKuB/4eK/L8XlKFJXXNEoTS77FoEdcx+1rB/MKaU23XjpxKQhw2sTLXen4/R/vxWs/HPNpX0RVdpWIldXe/w63nrSU6l/VMc7mBDBgV15uty4+PsZ5tZX4999x6pLU/K5lrBFNwy3Xv2y3fKBQYZWVO/LPF3FNt7jkq9Jkll6n9uTP1HAnTTWdMeh1SG5lKZ0osyvrL/exzJ/qKx0Q1hTY96GX1xfqUMANKM7UfzYJ+HIG8MEY593IPc7irr8zu1UH3bfffju2bNnih12hxuyX4xdRWmlCyxgjerf2oabPA5NZQKG1FPFEdpFX3bydMXtRXu5NQJldWIEQgx5Xd23u8DNXXdF1AGaP7OL1es5gu
/V1ZVXeHYyGS2u6zbXS1VUsyfSWszXSYgZHfsC39cRFKevy8oZjmLxiFx5bfcC6fc3vzn6MjlY+//WcX263Ltp79jJGv7kVW0/kwhisxxsTe+PFcT0RGlS3m6nYz+l29VpNjDHirTtq1jSP7d1S+llYsAFj+7TCo3YNoFyZdV1nt2ui6yN3ZdZqFVjX4oqZbnH5i3iCxPc13Y7l5YBj8zIlpeO/nbms+H5djaIEIGWLxcfoy8iw7Sedl5YDsFmTHRlq+9qMCnX+WhX36ZsDNZ2c//Laz9LfyT47rnRpkzvyzxfxcy3WGuRXVJul16n9yMYW1gahQXqdwwkHd8SKl7wS2yovV2PtqKETgLK8wM3Y1tpVD6q/jrNu5G6bo1nf0VIW1rsmaoAX5eVLly7F+PHjsXXrVvTs2RPBwbZnMR9++GHNdo4aD7Fr+Sg/lpbbl++9u/00vk/L0qQstyboVr7v3gaUgzvFuZw5LJaxvbQ+Hf/bfhqA5cDtjU0n8Pmv57x6rPZlb828aFAVpNeheWQojqAIFdVmDOzQFE0jQlw2P/KlHF6kpKO7K65KcsWDL/kB3+zP9zscMOdby+/lB+72FQNa8bUMvz4QBAEf7DiNf393BFUmAR2bReDtKf1xRYL/ej9oKTy45vV6/nIZTGZBUYn1n5fLAMv5GykYaN8sQtF9domPbHDPCV9e0/a2Hr+IsX1aSUGwGBhrNau7vFpeXl7zHlpVLQCyZf1KH1NEiEHKANsT3y//OToJc9el2ZTPJ8QY0adNLL5Py5IC1+ZRoSiqqPa6vLy8yoRfT1tOBAxzEnQHGfQINuhQZRJsPqtS0zJx4M8Cp7fprIloVkG59Luxz3TL34PFV4zSkzH2ny8ms4Czl0ql/wOW0nuTWUBMWAiijUFSp/x7h3XAHYPaYsRrPzsE456EW0842J/rtx9XRo1EUChQ7XqZXb1jjPXiSiq7kUe3tATcSWO9uK/AU53p/uyzz/DDDz/gyy+/xH/+8x+88cYb0tfixYu92olly5ahffv2MBqNGDRoEPbs2eN2+1WrVqFbt24wGo3o2bMnvvvuO5ufC4KAuXPnIjExEWFhYRg5ciROnDhhs83evXtx/fXXIzY2FnFxcfjb3/6G4uJir/affFNZbcYm6xgsf5WWuyvf87VTNuBd9/KBHZoiLFj5S1AM2q5Pcr+0Y2N6Ft61Btxy3j5W+zXdPVvF2DTKUSIxxmjTvdyg12HCgNZOt9VqDaovpemuSnLFTLf8ANHdgd6J7GLpIC4y1PvxQ5405NncJRXVePjz/Zj/TTqqTAJu6pmAr2cNrTcBd2paJp756pD0/Rubjktz4D2VWEfJMuQx1iUMjXn2r5bP87N5ljW1ldWW16d8TTfgviu1JyazgBLrCbcjmQU22Wb7TLfSxzTxyjZSh3d74jitm3ol4o0JfQAACdGhUqVDt4Roy3bWN6tm1kyttyPD9mTkobLajMQYIzo6OQlkMgtSF/jC8iqYzIL0Gawmuy5/b71UbBucFMpOivjS4yA1LRPDFv2I7acsnaDFqoKcwgoMW/QjJq/YJQXcgGV5i7i8yqjyc9DVyfIyH5ZKUD3WkAJuANiywMsrKuxGfudqYPahehtwA14E3f/4xz/w/PPPo6CgAKdPn0ZGRob09ccff6jegZUrV2LOnDmYN28e9u7di969e2PUqFHIyXHeGGXHjh2YPHkyZsyYgX379mHcuHEYN24c0tJqhqm//PLLePPNN7F8+XLs3r0bERERGDVqFMrLLR9uFy5cwMiRI9G5c2fs3r0bqampOHz4MKZPn656/8l3O/+4hMLyajSLDEH/dk08X0ElJeV79qOh1PKmvNyg16l6vOKIM/moMHv+eKz2me6wkCDVGeiwEINUAiyuXxODJvugXqs1qL4EHa/e3tvp/YuZDaXrCavNApZa55bLZ5ZrrSEGWABwMqcItyzbjm8OXECQXod/jknCsjv6+TQ/uTaJgYZ9RYfSE
2Dyg3Qx0y02TXT1VtOQR8dp+TyXNzezfG95bYvZyN/OXMbOU5ek90qTWcDOU5fw9f7zNpfbS03LxNBFP0rB5eyVBzD85Z+kINQ+0FX6mK5PSnC6JAEArunaXHq/umR9rnVqESmdyAkOsn22NLdWK3mb6RZLy4d1buZQmSYGseK65dS0bAxduBlPrznk07KAtPMFNr93+/fg1yf0VtQjQf754upkPAAUV1Y7vXzNvvP40dp/Rm2mOyLERdDNTHf95MeGv/WTjwt/PHUjbze0XpaUy6kuL6+srMTEiROh12vTafH111/HzJkzcffddwMAli9fjvXr1+Pdd9/F008/7bD9kiVLkJKSgieeeAIA8MILL2Djxo1YunQpli9fDkEQsHjxYjz33HO45ZZbAAAffvgh4uPjsXbtWkyaNAnffvstgoODsWzZMulxLF++HL169cLJkyfRuXNnTR4bKSMeeN7Qw/vxUO54Kt/TolO2mEFQ070cADq3iMK2k5cQFmxQ9MHbs1W004MukT8eq31ZdGiQHt0So/Czdaa6EoIg6/wtjUmz/HvtFc1w99COXnUxdsdTR3d3ckucn4EWMxtqzs+8tyMDs0Z0RlSYfwLFmDD1J0Hqg3UHLuDpLw+itNKE+OhQLLujHwa0rz+P09MJMCVj8eQnF5qEW2qSxaaJD3y816FXbF3sVK4lJVMa4qNDAeiQXej+dR9vnRktBd1BeqSmZeKb/Zb1lalpWUhNy0JijBFjeydi3YFMp53F5SfnxCDO/n7l+2s/q1vN5AmDXmezJCG7sAIvfXcEv57OQ2F5FaKNwbhYZHnvai5bBhRsd7zW3HoC0Ns13Vut87ntS8tdPn43kyqUyiyswOQVu6Tfu/2abmOIQdH7/Ku398bQLs186g+w4hdLgilUq0w3g+76ST76ytPsaa2FN6tjTdQ04Kkbua7+d/hX/QimTZuGlStXanLnlZWV+P333zFy5MiaHdLrMXLkSOzcudPpdXbu3GmzPQCMGjVK2j4jIwNZWVk228TExGDQoEHSNhUVFQgJCbE5cRAWZpm1uG3bNk0eGyljMgv44bBYWu5doy9PaqNTtnj2XU15OQCEWsvLh3a2BMChBh0+mTEIn9w7CPcMbe8w0zcjt9Rtdswfj9W+kVposN7lGXtXIkINUrbl4DlLxqLYWr4ZGRrsVRdjT5R0dHfFVfZJbWYDsKzv3pORZ3PAZVSxrMCTgrJqbEzP0uz2Aq2y2ox5X6fh4c/2obTShCGd4rD+4eH1KuAG1J0Ac8VZphtw3TSxrnYq14q717T4/fyxPTB/rPttAKDI+v4jZnuPZxXigY/3Oqybziwox39/yfC4NEnpLG37plnyx2TP2UkU+ZKEmcM7oGt8JEorTfjC2lDxorUMW957I8juxKkYdJvMguoKr9ziCqRnFgIAhnauCbq1bHLnjvh7P3zedm34uTxl4zfFE6q+9AcQqwlUZ7pdNJErZ3l5/Xfwi9q5H2MsMHUdkOJtKXcdFdbUczfyxhh0m0wmvPzyy7jmmmvw0EMPYc6cOTZfauTm5sJkMjmMH4uPj0dWlvODyKysLLfbi/+622bEiBHIysrCK6+8gsrKSly+fFnKqmdmOg9oKioqUFhYaPNFvtuTkYdLJZWICQtWNO/SG7WxBtKb8nIAUsm1eNY+KiwEQ7s0Q1F5Fd7bftqhLLW4otptWao/Hqt9+XdokMFhTIrHhy0AX++zZpAOZ2Hyil1Y9pOl7DrCxdl/LbgKTlzxVJprv4ZP6d87p6jcpoP1DV52knfF1+URdcWF/DJM+O9OfLDzDADg79d1wkczBnnVvC/QtDgBZrum2/YEXEpyIrY9NULqdN7QOpW7ouSEg7tthlorfMRmh2K597oDmaoCRvvlOkqDuH3nHDuRi/sbY1cN4+kkik6nw/QhHQAAH+w8DZNZQK6Y6ZYtZ7FfIiTPgqstMd9hXfvcPTHa5nWpZZM7d8Tf++9nbX+PS3884bixE
+Jnnxb9AdRmul191jHTXU/JkyxVJbVzn+X5wNr7gbyM2rm/2lKWBxxd736bBhB0qz7aPXToEPr27QsANuuoAfit67TWevTogQ8++ABz5szBM888A4PBgIcffhjx8fEuy+YXLFiA559/vpb3tOHbYO1afn1SvMOBgVbUlO95SyovV5vptp4pv2w9AAwL0SvKGLgqS/XHY7X/u4QG6REuy3QHG3SoNgkOpa5y+510rBXLy7MK/XugJu8SvTE9C5//es7tjF93pbn2mY3mUaHIVlA62SLKaJO1tJTAaqchdDD/5fhFPPL5PlwurUK0MQhvTOyDv7jpX1DXaXECTN6wsKCsEiazYPPcFLOejY2Szu+utlmYegTbT13C72cta7bFWdXejKCSVysoDeLE8m9njykjtwSLUo9hQLsmeOyGKxQttflr31ZYlHoU5/LKsPlItpTptg26nWe6ActynzAV4wy3nbAsKxrW2fZ5V5vNHAU4zrYuVNBtXgfgyvaWPipa9AdQm+kODdLDoNc5nCBl0F1PmTX4uwUZgWqVr53CTGDLS0BQGFCtrMKj7lPQwbyexJjuqA66f/rpJ83uvFmzZjAYDMjOtl08n52djYQE55mghIQEt9uL/2ZnZyMxMdFmmz59+kjf33HHHbjjjjuQnZ2NiIgI6HQ6vP766+jYsaPT+33mmWdsMvmFhYVo06aN8gdLDsxmAalplqDbX6XlQO2sgfS6vNz6oS1mXcKDg3xal+2Px2p/wGYMNtiUySXGhOHZm7rZjGNTY5e1OY4/16Aa9DoUlFXive2nXZ4YMOiAZXe6L821z3S3bRqOimqz9PdzRsyc/3q6pow4uWWMx/XmTcKDMffmHjiVU4yl1qoAd+prB3OzWcB/fjyJxZuPQxCA5FbRePvO/mjTNDzQu+YTX0+ApaZlYu7Xh6XvP9tzDluOXdRkxGFDoOSEg/02qWmZ+HyPpQx7y7GL2HLsoiaj/LIKypAQE6ZoW3dNAMXKpr5tYxWfTAkLMWDSwDb4789/4N1tGTifbzkIzy2ukN5X7U+cNo0IgV5n6U2hJtMtCAK2Seu5m9v8rC43cxQ/CwUAReXVaBIR4lPPj9iwYOSXVanuXq7T6RARYnA4QcDu5fXU5TO+34ZXHcytXUEaQOa3hqyDeYfhzjdpAI83oI8gJCQE/fv3x+bNm6XLzGYzNm/ejMGDBzu9zuDBg222B4CNGzdK23fo0AEJCQk22xQWFmL37t1ObzM+Ph6RkZFYuXIljEYjrr/+eqf3GxoaiujoaJsv8s3+P/ORVViOiBCDzdowf/D3GkipvFzlK0osT8u3zk8NCzH4XJaq9WP1lOmOjw61KXW9Z2h7VbdfVFHtdl2rFpRUD+h0wIhu7jOr9pmNsJAgLLy1p9vyevEkhzzTHR0e7HG9uQAgLFiv+LVRlw96XblcUom73/8Vb2yyBNyTB7bF6vuH1PuAG1C2/tjVCTCxIVWOXVZUqxGHjZH4Oy2yC3jEqRC+eGH9EVwuqXDbVV7UuXmky5+J87XVLqeYOrg99DpgV0Yezl22BN0vfXdUGk0XZPceHhMWLM0lV9NMLSO3BBcKyhFi0GOgXY8FJV31Y8ODkRBd++9TTSMss7YB4NxlyzxuX3p+3NTT8hnqTY8PZ83U7Nf5Uz1RqcWYYW/ff4TaK2mvTe46mDeATLeid4xbb71VWsN86623uv1Sa86cOVixYgU++OADHDlyBA888ABKSkqkbuZTp07FM888I23/yCOPIDU1Fa+99hqOHj2K+fPn47fffsOsWbMAWM4kzp49Gy+++CLWrVuHQ4cOYerUqWjZsiXGjRsn3c7SpUuxd+9eHD9+HMuWLcOsWbOwYMECxMbGqn4M5B0xyz2ie7zqM8beEAPD2/q1AgCM7N5CszWQ3szpBgCj9UNbnPsZFmzQpCxVy/WeIfaN1IL0iJCVI4r7YdDrMLBDU3yfpr6pl7+ztErWG1abgasWbHYb0NgfZBmD9
NJJjkQn68bvuqqd9DsPl1UHnMktkUYAxYQ7z3wVlFbhgY/3KjqYr48jog6cy8eY/2zDz8cvIjRIj1fH98aCW3vWyntBbfHmBFhtjDhsbJSedPP2kC6vpBJ//3QfxvZ2sfZa9v9qN3+3XCdN0JQ49Ge+04kK4kmatD/zbS6PCQuWenWomdW9zToqrH+7Jg4l6UpOMi28tSe2Pz0Cj47sqvg+tfDc6O7o3MJyskPecM1Tzw/7zz7AMi2iS7zltrwJup2t62Z5eT0VGhXoPQBCXJ/Eq5ci44H0dc5/lra2VnfFHxS9Y8TExEjrtWNiYtx+qTVx4kS8+uqrmDt3Lvr06YP9+/cjNTVVaoR29uxZm+ZmQ4YMwaeffop33nkHvXv3xurVq7F27VokJydL2zz55JN46KGH8Le//Q1XXnkliouLkZqaCqOx5o11z549uP7669GzZ0+88847+O9//4uHH35Y9f6TdwRBwPfW4MafpeX2DHodOjSLAGBZ16ZVSbMgeFlebhdghIcYNJvDK+9y60tXcHmm26DXIcigtzkYMQlmKQDwtpmOv7O0SoP6vJJKt5nEIINemrcLQDrwFE9yjOzewmb7Adb1g6lpmVj4/VHp8vnfpGPYoh9hNgswBjkPMsVj6BfWH8E/R7vPyNSnEVGCIOCjXWcwfvlOnM8vQ/u4cKz9+1Dc3r91oHfNL9SeANOi6znZUvK+JAg1o9y8IcDSjG3ZHf2kk6mihBgjOjSzVG+4C3LF9d7NopQH3eIJBVf7BACrfv/T5vIoY5D02VNRpTzodjUqTKT0JNPnv571eF9it34t3tUSYsKk6hkx0y3f521PjZCaYr51Zz9EWk+Qin/Fe4d1wOKJvQEA1SZBqg7w5gShPOiOsv6/rLKWx02RNpq0C/QeAINnBXoPtBPdCii5CHxxl/Ofr54G/PDP2t0njSla0/3ee+/hX//6Fx5//HG89957mu/ErFmzpEy1vS1btjhcNn78eIwfP97l7el0OvzrX//Cv/71L5fbfPjhh6r3k7Rz+EIhzuWVwRisx7VXNPd8BQ0ZrDXg1RqUFYpM1qBbbS84x3JlQ52bwytf8xhqnWX73NqaJoqpadkYtuhHzLs5yau5r02t6+v8SW1Q725+sjHYgOIKS4mqPGA26HXoGh+FTUdypMvCgg1uZ/c++Ok+t/shBlhNIkLw9pR+TtfNzxzeod6s8S2trMazaw5hrXUW8qge8XhlfG9Eu1nn2hCoaXhWGyMOGxulv6t7hrbH92lZNq+xiBADQoP0yHPTt0Ekvla7JkTh4J8FmDG0A0YmxWNgh6a4Y8UuZOSWSnPBnakpLw9xuY09JSdpLsv2PTI0yHLiVGWmu9pkxi5r5/LhLoJuwHOTO6UnZpdN7oeiiiqve4UAtn0Ttluz9OfySh22EwRBqhQY0ikOxuAgFFeYUG79PHvg2k5S4qmk0iQ14vS1vLxpZAiKKqpRXs1Md70U6DXGOgMw/DHg/O/AyY2B3Rct9Pgr8OUM99vseBNo1R/oMa5Wdklrip8xzz//PIqLtVi/QFTTtfyars1t1gfXBjEw1rI802y9LbUd/B2C7uCazGldmcMrHxmmA/DAx3ulg0ORWMZ4Olf9GqO7h7T3+wkET9UDcp4yifK/mf287Vi7UvEQg17R7F5PcorKHTKmyS0tfSV6tFRfYRQIpy4WY9yy7Vi7/wIMeh3+cVN3LJ/Sv8EH3GrVxojDxkbp7+r6pATpNda/XSwAS5ClJOAWZRWUSUHiuL41VUZidZCrxmUms4A86wzp5irKy9WefBHHkoV62B97B/4sQFFFNWLCgj2+57irslK6v7klFdJ7Xp82lvuLUNFl3f4EdZumlkZ34pp3OfmJh5Agvc17fKfmEYiLDLVpHir+nUJdVCm5I7+dJuGWkyuc090ABAegD4lgsjQeO7O99u9bUzpg8EPAzqWAoOD9aP1j2nSODwDF0Y5YPkukhe+lruW1n6GTMt0aBt3ej
wxzLC8XKRmLUxvk5eVlVSaXAaQOwLvblc+OFLvnjrAryfYHefWAUq4ODuUlhfblhfZzdk9fKtFkdq183byYMV23/wLSLhSioh5kSdYfzMSTqw+gpNKEFlGhWHpHv3q3Br221MaIw8ZGSafq5pGh0vtrQVklfj+T79V9vfBtuhSkJ8bWBPviyUtXme7LpZUwC5a15U0jlGe61Z58EWe/1zRSU/b+IXYtH9rZ+6VKgPqTSga9Dq1iw7H/XAFKVASnCTFGm07/bZpYgqI/nWS65SX2IQbboPtKa8O40CADQoL0qKw2I7fIctLZ/qSrEhGyJIP4d+aa7vpK9jroMQ7Y/6nl/y16ADmHnV5DcxlbgCrH53S9EhoDHP5S+falue67nNdhqt4x6sscbqrbTuYU4WROMYINuloJuOyJa3K1zHSbpDXd6q4XGmxfXm57Hkyrddm+CJYdgLj7lQkACso8z0oFgElXtpHWPUbUUqWDWD3QNEJZZtXVwaH8b2a/Jj8mzPZgWc1BojPu1u97ypzVBZXVZjz/zWH8/dO9KKk0YVCHpvj24WEMGN3wpes5OaekU/WYXpbgzN0aaSXEgFuvs4yWEgVL5dzO30SzCy0n58JDDPj19GXFn09KeoDEycrVvc10i+XZwzr7thzMm54l4Soy3FMHt3PaN0Fc0/3n5TKpMk0kLosK0uug0+lsToz0a9dE+r+4BltseOddprvm807MdDPorq9kzyN5qbm7DtxaO7+/9u7LXyrygcIL6q5Tm79jDakKurt27YqmTZu6/SLy5PtDliz30M7NAlJaKh6sVpu1C1bMXjZSs2+iFVYHOzdrMcfW3h8XS1BqzS446+bqLynJidj1zEi3mSRPjerkB1r2fy/7THd8tPIyUbUBVqgXI39qU2ZBGSa9sxPvbT8NALj/mk745N5BLItWoC4tL2koPHWqfm/HaQxb9COW/nhCk+oUswAMW/ST1JTR3Umy1LRM3PW/PQCAkgoTJq/YJY378kTJSZr7hneULhPfo9SctCuuqMbes5cBAMN8HO/pzUklNUH3jcmJTk9QJ8YYYdDrUGkyI9uuikn8Hej1Ogxb9KNNCforqcekv0OktUrgknWeuv1JcyXkn3fiCWDO6W5gSnNr774qCmrvvuqSSPfjXesqVUe7zz//vFcdyonkakrLa69ruZw/Mt3SyDCV2Sf7D201Bxe1JVjt8HEF9pyuWS+941QubunTSvP7cCUkSI+X/poslZqrbVQnLyn0tKb7yvbKSoX/OToJL6y3bRhkXx7p7HEAdTPo3n4yFw9/tg+XSioRZQzC6xP64Pqk+vkhGSh1ZXlJQyL+Tpf+eBJvbDru8POsgnK8semEZveXVWjpdfH2lH5Sptu+vNxdo0Xxup5OsognFOybjonvIe2bReAl6/QEh6BbQSO13X9cQrVZQNum4Wgb5/vaVU/7a/94w62BaniIAWWVzpc4AZb3Y1cnS4MMerSMNeJcXhnO5ZUhMSZM+plYYl9ZbXY44ZJbXCH9HSLtMt32XeqViJSt6Y4V13Qz0133ma3rp4uzLQFfuyE16woBoFD9qFRN5J4MzP0GWsnFQO+BV1QF3ZMmTUKLFrVfDkwNx9lLpUjPLIRBr8P1SYEJumsy3Vqu6fayvNxJ9/K6Rq/XIUivQ7VZQLBBh2qToKoJmCePfL4fwXodburVUsNbdU/tQZ+cbSM195nuyNAgRZ3oU5ITMSpZXYAlZtzrUtBtNgt4a8tJvL7xOMwCkJQYjben9EO7uIhA71q9pKbrOSnnamSVvzrXPP9NOq7uaskQV8ler55msuvgfpKCnLuTNCdzaprgRotBt/UkgJKRYeJ87qE+ZrmV7q+9cOv7bL+2sdh+8pLL20xKjHZ7n22ahFuD7lKb4LzUTaZZ/ndo08QSqBeVW5ZRnc8vg8ksqDoRZpvpZnl5vZC+Dkh9yrYEOiQSMMkayp7aVPv7BQCVhYDD0UV9ogOiEoGKQqBSRbPu7
54Akm4B9HXvmNkdxUE313OTFsTZ3IM6NFXVLEZLQQY/rOk2ezmnux6UlwOW9YjVZhM6NY/EsawipwGkAEumt6C0SvXb/6zP9mEpdLipV+2VzXqbSbRtpOY+020MNigO8NUGWHVtTXd+aSXmfHEAPx61jEybOKANnr+lh1ezbIn8RenIKneijAYUlSsLlMRpCPkllrXe8ky3mpnsSt4bXL2HyCdQ1Kzptp60U5DpFpuouRsV5g2l73liprtpRCjentIPD322D1VO1sbvPZsvja90duLU0kztksOs7v3n8t3ev/h3sH+/X7E1A98ezPR4olZOfmI9p9CSMS+trMbOU5dY0VKXiJntY98Bu95y/LmaANHfutwAnNgQ6L3w3o2LgO1vAud/VX6detpMjd3LqValHg5saTlQ073cL+XlaoPuelBeDljWdZdVAa2bhGH2yC4uA0gATrO6npgF4MFP92K5vnbXq3qTSbTJdDs5aRJs0EkHhEbZCDitS4VDVXYf9qeDf+bjwU/24s/LZQgN0uOFW5Ix4co2gd4tIgdqRmy5qk5ZdGsvzF132GF0ojtiGbc8yK2tmexBsr4catd0ZxWU40ROMXQ6ywzrQBA/F0srTUhJTkRcxGFkWQNWe+7K8qWxYXm2Y8MuFjm/LXsZuY5dotUsA0hNy8RCa5k/AGmJw5+XyzF5xS7p8kQFFVfkR84y23VZq371M+jW6YHxHwDdRgPrHlZ//XrYTE1x0G3WsOkUNU6ZBWXYdzYfOh0wqkfggu4gjcvL5SekGmJ5OVBzgBYaZPAYQDrL6iqltJQykNyNDNPpdIgJC0FucQVCDHqbx6F1qXBdWNMtCAI+3XMWz69LR6XJjLZNw/H2lH71ZnY4NT5KG/k9OrIrPv/1rMvqlOJKE55cfVDx/UZZm4ZWVdd8XtTWTHb5yeCconKYzILioFvsWt6zVYy0Brm21QTd1agymZHtIuAG3Jflix3M7TPd8tnZaildBuBq7b4zagJ50lj6OuCLqai9cm0fS8NDIoEtCzXbm1oV3gxIGgtkbAXKL6u/fj1splZ7bYOp0dtgbaDWr20TtIgOXAdjg8aN1OS3ozbTHWLQQ6er6cdRl8vLgZqTBO4CSPugPLeoAi+sP6LoftSUUgaKuzXdABATFoTc4gqvZriq2w/LfQeqvLys0oR/rD2ENXvPAwBGdo/HaxN6O6xrJ6pLlM5BnzWiM2aN6Ozy5OLt/VrjqdUHFR0ux0WEoLU1y3o8uwg7T13CwA5Na2Ume2paJuatq5kZvOynU1iz9zw6t4gE4LlSZps0Kkzb0nI1wq1jJUsrTW5nrYtcleW3djGru4OPPSc8LQNwt3bf1e2pWc9PGjGbLBnuWl0f7eN96erzem4rbzLW0a0szezqGQbdVGsC3bVcpHWmW34zaruX63Q6hAbpUW5tZhNeSzOr1ZKCboWBpDwoN5kF/N+2DMWZb19LKf3N3ZpuQOxIW+L3qoVAZrr/uFiMBz/Zi6NZRdDrgCdTuuG+qzuy9wfVeeLIKk/NDcVAx9UJQL1eh+iwYBSUVXm8z95tYvDhjjMALEHstpO5UgmxuC/2tJjJ7q4zuvh+7O6knSAINUG3xuu51YiwvpeWVFThh3TlXaLtP0vaWjPdmYXlqKg2SScuq2Qf4r6EMK4+u7zpI6B2PT9p4MyO+lNSDgBBRqCiKNB74T2T9b3Tm4x18m31rokaoHJON5G3cosr8Kt1TFQgS8sBeaZbm2DF7EN5OWA397mOlpeLs7rtG78pIZ/LqkRdn+PsKdMdbZ3lajIL2Hnqkqa9A5ztR2Utr+lOTcvE2KXbcTSrCM0iQ/HJvVfh/ms6MeCmekOrOejinGVPfjx6EcUV1TaXiSXEgGVJjv1nh68z2T11Rhe5G1d1PLsYF4ssVTv92zXxaj+0IH4u/nGxBC98q6xqCnD8LGkWGYKwYAMEAbiQXxMEiycekhKjHZ4TSv/Gzu5P5MuJ5Lp+ErpBqW9rh
HV183hROes7UbshQKTKuCDtS0tlQj1TN9Nq1OD8cDgbZsGyLkxcVxUoQdZGatVOup96Q95jUG15OWAbxNXFoNtkFqSDkkvFFapHpACWg9y37uiLWZ/tg6sYVItSytrgbk13alomdv1hGWmTW1yJySt2+a0pTmgtZ7qrTGYs+v4o/m9bBgBgYPumWHpH34AuFSHylq/NDVPTMnH+cpnH7fQ6OH3Pk5cQb3tqBEIMepRXmzFvTBK6JUb73GhRaXbVvqmY3NYTllm4AzvEeXXCVSv7zuYDAJR+ZLv6LNHpdGjdJAwncopxLq8UHZpZysrF99BWTcLwzUPDbJ4TWYXleHTlfo/3GRse7PKzy5cTyXX9JHSDUt/WCFeVBHoPfCNmuvUGYMA9wJaXlF+38Hy97F7OTDfVCrFreUqAS8sBP6zptsl0exF0y0qUw+vYmu7UtEwMW/QjzlkPLr85aPk+1Tr6TY2berXE0sn9nP5Mi1LK2mKb6a75v1jKWWY391bMaHnzO3OnNkeGZReW444Vu6SA+29Xd8QnMwcx4KZ6TVwGc0ufVhjcKU5VwP3Ax3tR6SYKFG/J3ceMWEK87eRFlFtfx7cNaK1qX1xRmiEtKnddHi82URsewPXcJrOAt38+pXh7T58lzpqpidVCIUF6h+dEgsL3uLuHdHD5NxPX7qv9iybWg5PQDUq7IUB0S0D1X4q8UlUCHF5r+X/TjuqvX98qE8Cgm2pBQWkVdlg/vAO9nhvQfk63TXm5F6+oulpeLh5Y2mdLfAkib+qViOVT+iHRx7LOQJJnt8XGd0pKOZ//Jl3TUnNpzq6fg+6dpy5h9Jtb8evpy4gKDcLyKf3x7E3dpXX+RI2J0qZYCTFGzBjaXtFtnrlkCQANeh2iQrUpQFSaIQ1y8TqurDZjd4ZlSdjQAAbdezLyFI/0Ajx/lrRp4jg2THwPtZ8mAigLmGPDgzFrRGeXP1e7xAqwhH314SR0g6I3ACmLAr0XjcuqaUDaWqA8X/1161tlAlheTrVg05FsVJsFXBEfhY7NIwO9O9KHmGYjw2QxjzeZbjFbqtM5/9APBE9BpC+dVf0xs7o2OVvT7amU0x9NcUL9nOk2mwUs/+UUXt1wDGYB6JYQhben9JdKMokaI6Vl26/e3ht6vQ7/237a47ZG6wm0mLBgzXojeOqMLiour5K6qcvfg/eevYzSShOaRYagW0KUJvvkDaUZ+6mD2+HG5ESPnyXOM92ug253jfdEC2/t6fHzS+wjoGScZpPwYCy4tWe9OAndIIXFAmVejLBSq8etwOGv4HXrvuAwoMrzEpc6b/V0oN1QddcJb1Yvu5fXjSN8atDEruWj6kCWG6jpXl5nysutB1zhwYY604xKTRDpDW/LOuuCYNmB2b6zl2EyC4oPDLVsilOzplv7ZiIFpVX420e/4eVUS8B9W7/W+OrBoQy4qdFT+hrOLanwmCXVwVJCLGZfYzUctyfPrrp7d93/ZwEmr9jlsGxILC0f2rmZ6qkcWlKasb8xOVHRZ4mzsWE1mW7nlWauGu8lxhixXEWFVkpyIrY9NQKfzbwK9wxtj6YRtnPPY8OC8ejILvjtuesZcAeCOKO7NgJuAOg2GpjwIRDu5Yn4zqO03Z+AEYAz29RdZfRr9bJ7OTPd5FfFFdX4xdqMpS6UlgPyTHdd6V5uCZ7qUml5IILI+iA1LRMLvzsqfT95xW4kxhgx6co2iq6vZVMcf63pTjtfgAc++R3n8soQEqTH82N7YNKVberMCSGiQFL6Gm4RZVSUJZ13cxIKrZ3NY8K1nXGvJrsqLhsSS7O3nqgJugNJPHHhav/VNuBsY52Xfk7WBK/SZHkPDXFTaaZVhZZ4wnlwpzj8Y3RSva34anACMaM7Mt7SCKxrCvB6d6A0V/l1dUHA+T3+27dA0AcBt/4fsP5R9yc+hjwM9BhXa7ulJWa6ya9+OpqDymoz2seFB7RETU7sXq71mm6dDl4FJnUx6FZzYNlYiGvc8
+3m8mYVlOONTScQGx7sMaOlZVMcf6zpXvnrWdz69g6cyytD6yZh+PL+IZg8sC0DbiIrpdlr8bXuKksaHmKQAtyCUst7SpPwEPub85k8u/rGhN4O2VWRvPdEXnElDv6ZDwAYFuCg2916aG8acIrl5XklldIYtwrr2DRPy7u0rtCqzxVfDU6tzujWAdGtasqjg0KAMW+ouwmhOjAzxcObWYJefzBXAxHNgCdOAcMedX7ft38A3PCCf+6/FjDoJr+q6VqeWGcO3DVf0229GW9KywF5eXndKTxRe2DZ0ClZ4y6y/535qzN7iIYjw8oqTXhi1QE89eUhVFab8ZduLbD+oeHo2TrG59smakjclW27eq3LA9/Zf+kC4P/bu/Pwpst0/+OfJG3ThS5AoS17QQTKvgjSg8flgC0K2qNHHdBBOYIC4zkqOg6isriAOsrghqg/xcENxjPK4mgRQUUFQUGlLaiIZVHaspRSaOmW5PdHmqQtbSlt0qTJ+3VdvZDkm/RJ5Em+9/d+7vuxb8F3QTf752fB6TJJ7l1eXnPMI3u0VXx0mPKLyuo8zlE29MbX+2W1Sd3bRahDTJhHxnQuUvslqF2rMy8WNKYBZ1RosGIqVxQcrFxi7sh0+0pPFXjBSffuLnJWqY9XXx6ddJV04YzmHcO5Co+VZu62B73XvyEFmd3/O07l2d+XmkH35Y9J9/4s9Utz/+9sRnzCwGNKyi369MfDknxnablUpabbTft0OzLmjY2nQoLsDzxdbtGWvcfc2uG6sRpzYunPGlLjXlBcrrtGn39GRstTndnd1Uht39EiXfPiZr27/TcZDdKfU3rplUnD3L7UFfAXdWWv65vrjsD3ztE9NaBTtMotNr2z7YAk6XhlptvTc66h5UDb9h2T5N2twmpqV2VVVc/2rfTO1Av15V8ua9TnaufKum5H0F1afvbl5fBzRUea73ddcr89yK6p1xXNN4ZzYrD/jPubPSsvSVarVNHwXQUazNGR3FBjLiYMaJE13DX5TmoNfmfTz0dUXGZRh+hQDfChjJm7M92O5eWNyXSnZ+Zo/S77hYkD+cWa8MrXSogO1dzxSV5vpFJXPWC8j4yvOTX0ZLVbbLi+/MtlzVKnF+KGRmrrsnJ17z9+0MnSCrWNCNFzEwYr2YdOtAFf1dgaX4PBoP/+t0TdtfJ7Ld+yX7f9ew8VVAbdMWHuX15eVUPLgX45fEqSNKpnO08O55yEVym/6tcxukm7QHRuE6aM308467rP1kgNASCiGf+tt+1R++2OfcILc+SW2vLwWGnoLdIXT53b4wzG6tvyRHWwZ+YdFwqsFmnNHU0fX01Vl9zXDLpzM+wdzlt44E3QDY9Jr9K13FeWlkvu36e7scvLHTXCNUdRs6GNN7X07b3c5VybJ7lrW7D6ODLd5RabrFbbOXUYrrBY9dd1P+mlTb9KkoZ2ba0XJp6ZuQNQt8bO9Sv6J2jBh7t1+GSp/pVxSCccy8s9nOk+2zZiBkntIs3KKyyVyWjQiO6+Uz4UXmX/8vZRTVvWekamu4JMd8CLbMZzrfA6Lmw79gn/x6RzfMKabRorzwXG/c3eIf2Ht88tkLdZpZQF9qxzqzh7IFw12N33pVR28hzH2ABDbnb9np/Sq9+3bra05Xn7+1PbKoEWgk8YeERZhVXrd+dJsm/l4Uvc3b28McvLz1YjLNkb2vjKUvNAb/biizXu5mDXl6CjJrEhDheWaOL/2+oMuG8dlagVt11IwA00k5AgoyaN7CpJevXLbB0vqsx0ezjobkjZ0OVJ9uWdgzrHKCrUd0pMwqt83sVHNe2zqlNlM7XfjjuC7oY1UoMf6zzizOyqp9SXoEm6yr6N2FkvAlQ2Y7vu71JUjWOjOtifI+kqVyDveExDRbST+v+Xvbt6zexy9hcNf55z4VgBsGuN9M9bz7y/MMd+QWLXGs/8/mbAJww8YvPeozpZUqHYVmYN7dra28OpxtG93GqTrG4IassrAx6L1dbgmmxP7
4MN9/LFGvcQk+vju6HN1L7+9ZiufO5LbcvOVytzkJbcOEQPjUtSsImvAqA5TRjeReYgozJ/L9T2/fbtcXJOlHj8QuvZ6tGPV+7O4O2u5TWFm10n/nFNDLod+6IfzLcvLy8j042DW6svqfaks9WPJ10l3Z0lXTK7jgMqzzNSH7dvnXVXpnTzB9K1r9r/vCujejbYEcjXDM4bO0ZPneYc23uWrdsqb0ufZT+uBeITBh6xrrJreUrfOJ/LjFYdj8XWtBOc9MwcTXjla0lSSYVVE175WqOe2Kj0zPo7YbIPdsvTmOZJnhRsMjgvmJ+trttms+mlz/fqxv+3VUdOlur8uFZafce/6Yr+vrUKBQgUbVuZNazygrRjpcrjH/3YoO+PpnJ0U4+rXKY9b3ySvvzLZbo8KV6bf7HvFTyqp28F3aHBrtPVIydLm3RxwrFt2MHjxbLZbNR0w941u7k4moXVx2iSLvmLvUt4VIfq91XNZDuOTbyo7sy0ZD/2rkxp1D0NG2N9Ne5dRzXsOc7Vjr/bl67XuxWaTSr83b7FWwtETTfczmK16eMs31xaLrm6l0v2sQY38nu2KTXZ7IPdMvlSjbvBYFCIyajSCmu9HcwLS8p17z9+0Me77HPyPwd31GP/2U/hIXz8A96Snpmjr/YeO+P25urpYTIa1CEmTHmFpUqICZPJaFDm7yd0vLhcrcxBGtQ5xmO/+1ylZ+Zo9XeuE/G5a7K09PO9jW7o2TEmTAaDVFxm0bGiMjLdaFggLEl9r5Wy/tn431O1WVhDJF1lr8vev9l+YaC2GuuGMpqkHpdKXz599mPrW96eeJEU0koqO3XuY6hP4e8NX7renBdJ3IhPGLjdtux8HSsqU0x4sE81YnGoGiA1toN5U2uyfbFGGA3jSzXu5rPs1b3rUKHGP/elPt6VpxCTUY+m9dOi6wcScANe5Pj+qE1z9vRoE27vln68cu/uL/bYs9wXdm/jMyUnjovbRWXVV/M4Lk40ZlVAaLBJcZUXtA/mF1PTDVfn8PrWTkd1lAbf2LTfU7VZWEM1JJPdUM7XWY+zXRgwmqSrXmj8GOpz5KeGHVdXMzofxycM3M7xJTi6T5zPfHFXVS3T3ci9uptak+2LNcJoeUIql0M69pmt6t1vD+o/l3yl/ceK1TEmTO9OG6mbLuzqUzsJAIHIV3p6tI6wB935xfag+6vKpeX/5iP13J5sONq5TWVd9/HTzuX9ZLoDWLWGY3Xod61kamJzwbq2C2suztdZT8on9fGzB/b90qTk/3Xz4CT98nHDjmuh5zF8wsCtrFab0ivrucf2i/fyaGpXPdPduMYZ7qjJ9rUaYbQ8jsxM1e7lJeUWzfrnTv35/3aqtMKqS3q10wf/M0oDfWi5KBDIfKWnR5sIV6a7pNyibfvsQf5FPlLP7cmLE1W3DXNctCTTHeCSrpKS/6fu+zc/Jx34umm/o6HL2D3J2VitZq14x+q14mdz+SPSyHrer3NikMLaShUN/Mw7WzM6H8UaQ7jVdwcLlFdYqlbmIJ9rxOJgMBhkMhpksdoavXzPXTXZvlQjjJbFYrU5u+/v2J+v/h2j9fvx05r+1nZlHSqUwSDNHH2+/nTpeee0hzcAz/KVnh6tK5eX5xeV65t9+SqrsCouyqwe7Vp59Pc2lCcvTlTdNsxx0ZKgO8BZLVLm/9V/zLaXG/nkBnuQey713J7kjlrxhrxfDVJ5ftJtlLR7dcMe4gsXLxqBoBtu5ehaflnv9j7dCdQRdDe2pttRk517oqTWpW8G2TPWDanJdtQIAw2Vnpmj+Wt3KafQfrL58Ae79dzGX1RSbtHpcqvaRITomT8M0kU96+lACsAr3Pn90RRtIuxLZY8Xl+lLR9fy89r5TAmKJy9OVN02zJXp9t1zFjSD/ZvP3jm7URnWKlt8NaUe290cteKNtX+zdNINOy1EdbC/N7k7pd0NON4c6TsXL84Rl/XgNjabTR9V1nP76tJyB0ddd2Mz3VVrsmuiJ
hue5GgsVHPZ5fHicp0utyoxNlwf/M8oAm7AR/lKT482EfYtw44VlenLyiZqvrK0XPJsw9Gq24ZR0w1JnuuIXXOLL3/hrvfr6iX296ahW5Fd+CffunhxDviEgdtkHSrUwfzTCg026uJevn3C7ziZaWymW3LVZDvq4hyoyYan1NdYyKGk3Kq4KLaaA3yZL/T0cGS69x8rUtahQklS8nm+s+rKkxcnujiXl592XnxneXmAc/eS5eG3STd/IN2V4X8Bt+S+96vYfsFPiRdJYa3rPzaklXTxfe75vV7A8nK4TXqmfWn5Jee39/ktiVyZ7sY1UnNI7Zcgc5BJk1//Rh1jwvTUdQOpyYbHnK2xkORqLETJAuDbvN3Tw1HTXVBcLknqHR/p8Tryc+W4ODF/7a5qn33x0aGN3qdbkuKiQhVsMqi8yg4mZLoDnGM7rcIcqd5L2w3U56qmLd/2dV2T7ft5N3WJuSN4N5qk8c9K//hj3cemvdhis9wSQTfcyLG0PNXHl5ZLkslo/3JtSqbbwVb54dy2VQiBDjzKV7oeA3APb/b0iA6rvv1Rso9+f3ni4oTJaFDHmDDtO1bsvC3EB7c4RTNybKf1j0myr6Vowvnh2fa69gdGkzT2yfqD5LOp+T4lXSVd/4b00X3Vg/nIDtLYJ1r8igGCbrjFnryT2nukSMEmgy7r097bwzkrx3drRSP36a7KcaWc7DY8zVe6HgNo2dIzczRvza5qt73/3e8antjGJ0ujPHFxonObcGfQHWQ0KIigG47ttNL/cpamavVp4F7X/sARJK/9X+n08XN/fG3vkzs6q/sogm64xUeVS8tHnRerqNDgsxztfUGVme7GNlKryvEcwUa+sOFZvtL1GEDL5WjGWPMzpKC4XNPf3BEwPUk6Ve7VLbG0HFXUDPoO/yh98deGPTaklX0JdAvPyJ4Tx/uV/YW0/0v7AoHEi6TiY9Ka/5XKTp75mLA20vhn6n6fmtpZ3UcRdMMtHPXcY1vIF7U7Gqk5OJ6DTDc8zdFYaPqbO85Y/EbXfABnU18zRpvsnyPz1+7SmKR4v/8c6dwmzPnfNFFDNVWDvuwvGh503/CW1OMSjw3LZxlN9tdd87UnXW1///Ztkgp+k2I6SYkX2/fk9oPM9bki6EaTHThWrF05hTIZDRqT1DI2rG/qlmFVOZqxBZn8+wQFvsFTjYUA+L+zNWO0KXCaMXYm042G6JoshcVIpwvOfqyjEzfs6grGAxRBN5rM0UDtwu5t1LrG9lm+ypXpblr3cslVF+7vWQH4Dm93PQbQMtGM0cWxV7ckmYMCL+uGBjKapBEzpM8WnP1Yd287Br9C0I0mc9Rzp/b1/a7lDia3ZrrtzxFEwINm5M2uxwBaJpoxunRpQ6YbDfTv90pbl0qn8+s4wGDfbszfO5ajSfiUQZPknDit7w8WyGCQUlpQ0O1YCu6Omu5yaroBAC2AoxljXd9WBkkJAdKMsXV4sMKD7afBJWUWbdl7zC0X4uGHjCZ7469aZ07lbYHSsRyNRtCNJnE0UBvapbXaR7WcK+OOfbotbtgyzGJx1HQznQAAvsvRjFE6M3wItGaM67JyVVZ5DvBbwWlNeOVrjXpio9Izc87ySAQkx3ZiUR2q3x7VwX57IHUsR6OwvBxN4gi6U/u1nCy3VKWRms193ctZXg4A8HU0Y6x727TcEyUBtW0azpEf7yENzyPoRqMdPVWqb/bZ61taWtDtiZruQMgMAABavkBuxsi2aWgSP91DGp7nE+thX3jhBXXr1k2hoaEaMWKEtm3bVu/x7777rnr37q3Q0FD1799fH374YbX7bTab5syZo4SEBIWFhWn06NHas2dPtWN+/vlnXX311YqNjVVUVJRGjRqlTz/91O2vzZ99nJUnq00a0ClanapsvdESBHlgn24y3QCAlsLRjPHqQR01skfbgAkwz2XbNABwF68H3StXrtTMmTM1d+5c7dixQwMHDlRKSooOHz5c6/GbN2/WhAkTd
Outt+q7775TWlqa0tLSlJmZ6TzmySef1LPPPqulS5dq69atioiIUEpKikpKXB+y48aNU0VFhTZu3Kjt27dr4MCBGjdunHJzcz3+mv2FY6uwlpbllqpmupu+ZZgr0+316QQAAOrBtmkAvMHrUcKiRYs0depUTZ48WUlJSVq6dKnCw8P12muv1Xr8M888o9TUVP35z39Wnz599Mgjj2jIkCF6/vnnJdmz3IsXL9aDDz6oq6++WgMGDNDy5ct16NAhrVq1SpJ09OhR7dmzR7NmzdKAAQPUs2dPPf744youLq4WvKNuJ4rLtWXvMUkta6swB2em2w2N1CocjdQCJEsAAEBLxbZpALzBq0F3WVmZtm/frtGjRztvMxqNGj16tLZs2VLrY7Zs2VLteElKSUlxHp+dna3c3Nxqx0RHR2vEiBHOY9q2batevXpp+fLlKioqUkVFhV566SW1b99eQ4cOdffL9Evrd+epwmpTr7hIdW/XytvDOWfO7uXuXF5uIugGAMCXsW0aAG/waiO1o0ePymKxKC4urtrtcXFx+vHHH2t9TG5ubq3HO5aFO/6s7xiDwaBPPvlEaWlpioyMlNFoVPv27ZWenq7WrVvX+ntLS0tVWlrq/HthYeE5vFL/01K7lju4s6bbQk03AAAtgmPbtOlv7pBBqtZQLdC2TQPQfLy+vNwbbDab/vSnP6l9+/b64osvtG3bNqWlpWn8+PHKyal9f8aFCxcqOjra+dO5c+dmHrXvOFVaoU17jkiSxvZvmUG3yeS+7uUV1HQDANBiOLZNi4+uvoQ8PjqU7cIAeIRXM92xsbEymUzKy8urdnteXp7i42sP5uLj4+s93vFnXl6eEhISqh0zaNAgSdLGjRv1wQcf6Pjx44qKipIkLVmyROvXr9ff//53zZo164zfe//992vmzJnOvxcWFgZs4P3pj4dVVmFVYmyEesVFens4jUKmGwCAwBXI26YBaH5eTc2FhIRo6NCh2rBhg/M2q9WqDRs2aOTIkbU+ZuTIkdWOl6T169c7j09MTFR8fHy1YwoLC7V161bnMcXFxZLs9eNVGY1GWevoZm02mxUVFVXtJ1BVXVpuMLTMLyd3di+vqHwOvqgBAGg5AnXbNADNz6uZbkmaOXOmbr75Zg0bNkzDhw/X4sWLVVRUpMmTJ0uSJk2apI4dO2rhwoWSpDvvvFMXX3yxnn76aV155ZVasWKFvv32W7388suS7PXad911lx599FH17NlTiYmJeuihh9ShQwelpaVJsgfurVu31s0336w5c+YoLCxMr7zyirKzs3XllVd65X1oKUrKLfr0J/t2bi2xa7kDmW4AAAAAzcHrQfcNN9ygI0eOaM6cOcrNzdWgQYOUnp7ubIR24MCBahnp5ORkvf3223rwwQc1e/Zs9ezZU6tWrVK/fv2cx9x3330qKirSbbfdpoKCAo0aNUrp6ekKDbXX7sTGxio9PV0PPPCALrvsMpWXl6tv375avXq1Bg4c2LxvQAvz+c9HVFxmUceYMA3oFO3t4TSas3u5G7YMK7c4updT0w0AAACgOq8H3ZJ0xx136I477qj1vs8+++yM26677jpdd911dT6fwWDQww8/rIcffrjOY4YNG6Z169ad81gD3brKpeUpfVvu0nKJTDcAAACA5kFqDg1WVmHV+t32JnYttWu5g6um253dywm6AQAAAFRH0I0G27z3qE6WVKhdpFlDu9S+n3lL4d5Mt72RWpCJoBsAAABAdQTdaLB059LyOBlbeFbXtU+3G7qXW8h0AwAAAKgdQTcapMJi1ce7KpeW90s4y9G+zxM13cFGphMAAACA6ogS0CDb9uUrv6hMMeHBGp7YxtvDaTJn93I3BN3l1HQDAAAAqANBNxrE0bV8TJ84BfvB1ljUdAMAAABoDi0/eoLHWa02pWfZg+6W3rXcwdm93A37dFPTDQAAAKAuBN04q+8OFiivsFSR5iD923mx3h6OW7BPNwAAAIDmQNCNs0rPz
JEkXdanvcxBJi+Pxj1c+3S7oXu5s6ab6QQAAACgOqIE1Mtms+mjynrusf38Y2m55KFMNzXdAAAAAGog6Ea9sg4V6rfjpxUabNS/n9/O28NxG5PJjd3LLZWN1FheDgAAAKAGgm7U66PKpeWXnN9e4SFBXh6N+3gi000jNQAAAAA1EXSjXumZ/tW13MFV0+3ORmpMJwAAAADVESWgTnvyTmrvkSKFmIy6rHd7bw/HrdyZ6a4g0w0AAACgDgTdqJOjgdqonrGKDA328mjcy53dy9kyDAAAAEBdCLpRJ0fQnepHXcsdHEF3hcUdme7KRmp0LwcAAABQA0E3arX/WJF25xTKZDRoTJ84bw/H7YLcWNPtCNyp6QYAAABQE1ECauXIcl/YvY1aR4R4eTTuZ6oMkKnpBgAAAOBJBN2oVbpzaXmCl0fiGe7MdFPTDQAAAKAuBN04Q86J0/r+YIEMBimlr/8tLZeq1HS7JdNtrfacAAAAAOBA0I0zOLLcw7q2VvvIUC+PxjOCPNG9nEZqAAAAAGog6MYZPvLzpeWSuzPdNFIDAAAAUDuiBFRz5GSpvtmXL8k/twpzcGSlm1rTbbHaZKt8Cmq6AQAAANRE0I1qPt6VK5tNGtApWh1jwrw9HI9xdi9v4j7dFVWWp5tYXg4AAACgBoJuVOPqWu6/WW7JlZW22pqe6a75nAAAAADgQNANpxPF5dqy95gkaawf13NL7qvprvp4upcDAAAAqImgG07rd+epwmpT7/hIJcZGeHs4HuWufbotlqqZbqYTAAAAgOqIEuCUnpkjyf+XlktVMt2Wpm0Z5sh0GwxkugEAAACciaAbkqRTpRXatOeoJP9fWi65stLu6F5ufz4CbgAAAABnIuiGJGnjj4dVVmFVYmyEzo9r5e3heJyj03hTa7rLKzPlZLkBAAAA1IagG5KkdVW6lhsM/h9Auq2m25npZioBAAAAOBORAlRSbtGnPx2WJI0NgHruqiqsNm3Ze7TRwbcjU06mGwAAAEBtCLqhz38+ouIyizrGhKl/x2hvD8fj0jNzdNXzXzr/PuGVrRr1xEZnI7lzQU03AAAAgPoQdEPpAbS0PD0zR9Pf3KG8wtJqt+eeKNH0N3ecc+BdYbXXdAeZ/Pt9AwAAANA4BN0BrqzCqk9250ny/6XlFqtN89fuUm0LyR23zV+765yWmlPTDQAAAKA+RAoB7qu9R3WypELtIs0a0qW1t4fjUduy85VzoqTO+22Sck6UaFt2foOfs9xCTTcAAACAuhF0BzhH1/KUvnEy+nngePhk3QF3Y46TqOkGAAAAUD+C7gBWYbHq412OpeUJXh6N57WPDHXrcZKrpptMNwAAAIDaEHQHsG378pVfVKbW4cEakdjG28PxuOGJbZQQHaq6wmODpIToUA0/h/fCwpZhAAAAAOpB0B3AHF3LxyTFKcjk//8UTEaD5o5PkqQzAm/H3+eOTzqnANqxT3dwALx/AAAAAM4dkUKAslptzqA7EJaWO6T2S9CLNw1RfHT1JeTx0aF68aYhSj3H98JCIzUAAAAA9Qjy9gDgHd8dPK7DJ0sVaQ5S8nltvT2cZpXaL0FjkuJ1xTOb9FPeKd09uqfuuKxnowJn5z7dBN0AAAAAakGmO0B9lGHPcl/Wp73MQSYvj6b5mYwGJcSESZI6xIQ1OlNdQU03AAAAgHoQdAcgm82m9CzH0vJ4L4/GeyJC7As9isssjX4O55ZhJoJuAAAAAGci6A5AWYcK9dvx0woLNuni89t7ezheE2G2Z/hPlVY0+jkqLI59uplKAAAAAM5EpBCAPsrMkSRd0qudwkICb2m5Q7gz0934oNuZ6WZ5OQAAAIBa+ETQ/cILL6hbt24KDQ3ViBEjtG3btnqPf/fdd9W7d2+Fhoaqf//++vDDD6vdb7PZNGfOHCUkJCgsLEyjR4/Wnj17nPd/9tlnMhgMtf588803HnmNvsJms+mjyq7lqQG8tFySWpntQXdRaeOXl1PTDQAAA
KA+Xg+6V65cqZkzZ2ru3LnasWOHBg4cqJSUFB0+fLjW4zdv3qwJEybo1ltv1Xfffae0tDSlpaUpMzPTecyTTz6pZ599VkuXLtXWrVsVERGhlJQUlZSUSJKSk5OVk5NT7WfKlClKTEzUsGHDmuV1e8uew6f065EihZiMuqx34C4tl6Rwdywvd3Qvp6YbAAAAQC28HnQvWrRIU6dO1eTJk5WUlKSlS5cqPDxcr732Wq3HP/PMM0pNTdWf//xn9enTR4888oiGDBmi559/XpI9k7t48WI9+OCDuvrqqzVgwAAtX75chw4d0qpVqyRJISEhio+Pd/60bdtWq1ev1uTJk2Uw+Hfw5OhaflHPWEWGBnt5NN7lyHQ3ZXl5hXOfbq9PJQAAAAA+yKuRQllZmbZv367Ro0c7bzMajRo9erS2bNlS62O2bNlS7XhJSklJcR6fnZ2t3NzcasdER0drxIgRdT7nmjVrdOzYMU2ePLnOsZaWlqqwsLDaT0vk6FqeEuBLyyVXTfepJiwvp6YbAAAAQH28GnQfPXpUFotFcXFx1W6Pi4tTbm5urY/Jzc2t93jHn+fynK+++qpSUlLUqVOnOse6cOFCRUdHO386d+5c/4vzQfuPFWl3TqFMRoPG9Ik7+wP8XKvK5eXFTVpeTtANAAAAoG4Bvyb2t99+07p163TrrbfWe9z999+vEydOOH8OHjzYTCN0H0cDtZHd26p1RIiXR+N9EWZHprsp3cup6QYAAABQN68G3bGxsTKZTMrLy6t2e15enuLja1/+HB8fX+/xjj8b+pzLli1T27ZtddVVV9U7VrPZrKioqGo/LQ1dy6tzbRlG93IAAAAAnuHVoDskJERDhw7Vhg0bnLdZrVZt2LBBI0eOrPUxI0eOrHa8JK1fv955fGJiouLj46sdU1hYqK1bt57xnDabTcuWLdOkSZMUHOzfTcUOFZzWDwcLZDBIl/dlablUdcuwpjdSC6KRGgAAAIBaBHl7ADNnztTNN9+sYcOGafjw4Vq8eLGKioqcTc0mTZqkjh07auHChZKkO++8UxdffLGefvppXXnllVqxYoW+/fZbvfzyy5Ikg8Ggu+66S48++qh69uypxMREPfTQQ+rQoYPS0tKq/e6NGzcqOztbU6ZMadbX7A3plVnuC7q2UfvIUC+PxjeEh9hruoua0r2cTDcAAACAeng96L7hhht05MgRzZkzR7m5uRo0aJDS09OdjdAOHDggY5UsYnJyst5++209+OCDmj17tnr27KlVq1apX79+zmPuu+8+FRUV6bbbblNBQYFGjRql9PR0hYZWDzZfffVVJScnq3fv3s3zYr2IruVncmS6S8qtqrBYFWQ692y1s6aboBsAAABALQw2m83m7UG0RIWFhYqOjtaJEyd8vr77yMlSDV/wiWw26atZl6ljTJi3h+QTSiss6vVguiRp57zLFdWIfcvnr83Ssq/26U+X9tCfU/z/4g0AAAAAu4bGhBSiBoCPd+XKZpMGdoom4K7CHGRScGXX8cbWdVucy8uZSgAAAADORKQQANKdXcsTvDwS3+PoYF5U2rgO5uzTDQAAAKA+BN1+rqC4TFv2HpMkjaWe+wxN7WBusdBIDQAAAEDdCLr93Ppdeaqw2tQ7PlLdYiO8PRyf09QO5uU0UgMAAABQD693L4dnWKw2bcvO198375MkpfQly12bCHPTlpdb2DIMAAAAQD0Iuv1QemaO5q/dpZwTJc7b3tq6X30SIqnrriHCbM90Fzcy0+2o6Q5uxHZjAAAAAPwfQbefSc/M0fQ3d6jmPnDHTpVp+ps79OJNQwi8qwgPtgfd7+/4TT/lnlTr8BDFRprVvpVZVptNW7OPSTJoZI+2uqBbG23ff1yHT5YoNsJ+/+5DhZKkfceKZLHayHgDAAAAqIZ9uhvJF/fptlhtGvXExmoZ7qoMkuKjQ/XlXy4jOJT9AsX/vvOdyiwNmwIG6YyLGVXFhAfr8Wv6c1EDAAAACADs0x2At
mXn1xlwS/aAMedEibZl5zffoHxUemaOpr25o8EBt1R/wC1JBcXlmvbmDqVn5jRtcAAAAAD8BkG3Hzl8su6AuzHH+SuL1aZ5a7I89vzz1+5yNlgDAAAAENgIuv1I+8hQtx7nr7Zl5yu3sNRjz89qAgAAAAAOBN1+ZHhiGyVEh6quam2DpIToUA1PbNOcw/I5zZHpD/TVBAAAAADsCLr9iMlo0NzxSZJ0RuDt+Pvc8UkB30StOTL9gb6aAAAAAIAdQbefSe2XoBdvGqL46OpBX3x0KNuFVRqe2EbxUWaPPT+rCQAAAAA4sE+3H0rtl6AxSfHalp2vwydL1D7SHgQGeobbwWQ0aN5VfTXtzR0eeX5WEwAAAABwYJ/uRvLFfbpxbtIzczTrvQwVFJe75flahwdrIft0AwAAAAGhoTEhmW4ELMeKgK/3HtOWX4/KapNah4coNtKs9q3Mstpsenvbfn2x56hOlVpqfY6IEJP+/fx2uunCrrqwe1sy3AAAAACqIdPdSGS6A4fFanMu1Y+NMEsG6eipUpbtAwAAAAGMTDfgJiajQSN7tPX2MAAAAAC0QHQvBwAAAADAQwi6AQAAAADwEIJuAAAAAAA8hKAbAAAAAAAPIegGAAAAAMBDCLoBAAAAAPAQgm4AAAAAADyEoBsAAAAAAA8h6AYAAAAAwEOCvD2Alspms0mSCgsLvTwSAAAAAEBzc8SCjtiwLgTdjXTy5ElJUufOnb08EgAAAACAt5w8eVLR0dF13m+wnS0sR62sVqsOHTqkyMhIGQwGbw8H8JjCwkJ17txZBw8eVFRUlLeHA3gV8wFwYT4ALsyHwGSz2XTy5El16NBBRmPdldtkuhvJaDSqU6dO3h4G0GyioqL4EgEqMR8AF+YD4MJ8CDz1ZbgdaKQGAAAAAICHEHQDAAAAAOAhBN0A6mU2mzV37lyZzWZvDwXwOuYD4MJ8AFyYD6gPjdQAAAAAAPAQMt0AAAAAAHgIQTcAAAAAAB5C0A0AAAAAgIcQdAMAAAAA4CEE3QAAAAAAeAhBNwC3YTMEwIX5ALgwHwAEsiBvDwBAy3b8+HEVFRVJkjp16uTl0QDexXwAXJgPgMuRI0eUk5Mjo9GoLl26KCoqSpL9gpTBYPDy6OBpZLoBNFpGRoaSk5OVmpqqHj16aMqUKfrwww+d95PZQCBhPgAuzAfAJSMjQyNGjNDEiRM1aNAgTZw4UcuWLZMkGQwG5kMAIOgG0CiHDh1Samqqxo4dq9dff12vvfaa9u3bp7lz5+rVV1+VJK7cImAwHwAX5gPgcvjwYY0fP15paWlas2aN1q5dq/bt22vOnDlasGCBJALvQMDycgCNkpGRoXbt2mnevHmKiorSsGHD1LdvXy1dulSLFi2S2WzWTTfd5O1hAs2C+QC4MB8Al4MHDyoyMlIzZ85Up06d1L17d/Xt21c9evTQokWLFBoaqpkzZ3Ihys+R6QbQKMHBwdq/f7/27NnjvG3QoEG68847NWLECL3++uvKysry4giB5sN8AFyYD4BLUFCQ9u7dq127djlv69Kli6ZMmaIZM2Zo2bJl+vTTT704QjQHgm4AjZKQkKAOHTpo/fr1Kisrc97ep08fTZ06VVlZWfr++++9N0CgGTEfABfmA+ASFxen5ORkrVmzRrm5udVuv/HGG2U2m7V9+3YvjhDNgaAbQIOcPn1aBQUFqqiokGQ/eZowYYIeeughrVq1qtqxI0eO1IABA7R+/XovjBTwPOYD4MJ8AFwKCwuVl5en/Px8SVJ8fLyuv/56vfHGG1q+fLmOHz/uPLZXr146//zz9emnn8pqtXpryGgG1HQDOKvMzEzdc889OnjwoBITEzVkyBA98sgjevDBB5WTk6Obb75ZxcXFSktLU0xMjCT7cqouXbp4d+CABzAfABfmA+CSkZGhadOmKTc3V23btlVSUpJee+013XbbbTp27JgeeOABlZWVacKECerRo4ckeyf/Hj16UNPt5ww2WuUBq
Mevv/6qCy64QBMmTFD//v31/fffa/369erQoYM+++wzGY1G3XPPPXrxxRd1zTXXKD4+XsXFxXrrrbf09ddfq0+fPt5+CYDbMB8AF+YD4LJ//35dcMEFmjRpkpKTk7V371698sorCg4O1tq1a9W9e3c99dRTeuGFF9StWzd17txZkrR69Wp99dVX6tevn5dfATyJoBtAvV599VW9+eabWrdunUJCQlReXq4vv/xSU6dOVWxsrL7++mtJ0vLly7V582b98MMP6tatm+6//34NGDDAy6MH3Iv5ALgwHwCX9957TwsXLtSGDRsUFRUlyX5hauLEicrPz9fmzZsVGxurDz/8UN9//70+//xz9ejRQzNmzCDgDgAE3QDqNW/ePL3++uvat2+f8zabzaatW7fqxhtv1IABA/T+++9LkqxWq2w2mywWi0JCQrw0YsBzmA+AC/MBcHnhhRc0b948HTlyRJL937zRaFROTo7Gjh2rsLAwbdmyxXm8zWaTzWaT0UiLrUDA/2UAtXI09LjiiisUHByst956y3mfwWDQ0KFD9fDDD2vv3r3avHmz8z6TycQJFfwO8wFwYT4ALo785fjx42U2m/X4449LkoxGo6xWqxISErRkyRIdPXpUK1ascD7GYDAQcAcQ/k8DqMbRfdbxJdKxY0clJSXpnXfe0RdffOE8Ljg4WGPGjNFvv/2mjIwMSeLLA36H+QC4MB8Al9LSUkmueRETE6PrrrtOH374od555x1Jrn/3/fr1k9Fo1K+//ipJNE0LQHwCAnDavXu3br/9dl177bWaMWOGdu/erY4dO+rRRx9Vdna2/vrXv+rjjz92Hh8bG6sBAwYoIiLCi6MGPIP5ALgwHwCXrKwsTZgwQWPGjNH48eP1+eefKyoqSnfffbeioqL00ksvadmyZc7jo6Ki1L17d5nNZkmuC1cIHATdACRJP/30k0aMGCGLxSKz2axffvlFgwcP1iuvvKL+/fvr7bffVm5urh555BHNmjVLH3/8sWbOnKmdO3cqOTnZ28MH3Ir5ALgwHwCXPXv2KDk5We3atdPgwYMVGRmpSy+9VA899JBiY2P1/PPPKy4uTn/729/0xz/+UW+++aamT5+uzZs366qrrpJEpjsQsU83AEnSc889p0svvVSvv/66JKm8vFzz58/X7bffrlOnTunuu+/WG2+8oTfeeEPvvvuu/vWvfyk0NFQbN25U9+7dvTt4wM2YD4AL8wFwWb58uS688EK99NJLztuee+45zZs3TyUlJVqwYIEWL16sDz/8UEuWLNFPP/2kVq1aadOmTerZs6cXRw5vons5AEnSTTfdpODgYC1btszZcVOSHnvsMc2bN0/vvfeexo8fr4qKCtlsNhUWFspsNqtVq1ZeHjngfswHwIX5ALjce++9ysjI0Lp161RRUaGgIHsO86WXXtLMmTP117/+VTNmzHAeX1JSIkkKDQ31ynjhG1heDkCS1LVrV6Wnp+vEiRMyGo0qLy+XJD3wwAO69dZbNWPGDB09elRBQUEKDg5W27ZtOaGC3+rWrRvzAajEfABcunbtqi1btujQoUMKCgpSWVmZJOn222/XX/7yF9133306cOCA8/jQ0FACbhB0A7CbPHmyunbtqhkzZqiwsFDBwcHOE6spU6ZIstcxAYHgpptuUmJiIvMBAenkyZMqLi52/n3SpEnMBwSsX3/9VZs2bXL+fcqUKRo6dKiuvfZaHTt2TCEhIc5s9m233aY2bdpo+/bt3houfBRBNxCAfvnlFz3++OO6//779c477+j06dM677zzNGXKFP3888+65557VFBQoODgYElSfHy8zGazc1sMwJ8cOHBAy5Yt06JFi7R+/XpJUs+ePTVx4kTt3buX+YCA8vPPP2vUqFFauXKlM/A+77zzdNNNNzEfEHB27typUaNGafny5Tp8+LAkKSwsTPfee68k6YYbblB+fr4zk202mxUREeGcH4ADjdSAAJOVlaVRo0Zp4MCBstlse
uqppzR+/HjdfffdmjJlioqKivT222/r6quv1osvviir1aqVK1eqvLxcPXr08PbwAbfKyMjQ+PHj1alTJx0/flx79uzRyy+/rFtuuUW33367Tp8+rffff5/5gICxfPlyZWRk6IEHHlBwcLCuueYahYeHa+rUqTp16pT++c9/Mh8QELKzs5WSkqI//vGPeuKJJ6p1HB87dqzKy8u1cOFCDRs2TEuXLlVwcLA2btyogoICDRgwwIsjhy+ikRoQQE6fPq3rr79eXbt21fPPPy9J2rFjh26//XZFRkZq1qxZuvzyy/XBBx/omWee0aZNm9S9e3eVlZXp3Xff1ZAhQ7z8CgD3yc7O1qWXXqo//OEPmj9/vk6ePKklS5Zo1apVWrt2rTp27CiLxaK1a9dqyZIl+vzzz5kP8HuffPKJPvvsM1VUVOhvf/ubXn75Zd14443OZlHp6el6+umn+X6A33vjjTe0atUq/fOf/1R5ebkWLVqkn3/+WXFxcRo3bpySk5P1008/ad68edqwYYNat26t4OBgLV++nPmAM5DpBgJIWFiY8vPzNXToUEmS1WrVkCFD9MYbb2j69Ol66qmn1KVLF40bN07jxo3Ttm3bFBUVpZiYGMXHx3t59ID7VFRUaNmyZRo0aJDmzp0rs9kss9mskSNH6sUXX3R2ZzaZTEpLS1NaWpq2bt2q6Oho5gP83vvvv6+srCwVFBRo+vTpiomJ0QcffKChQ4dq2rRpSk1N5fsBfu+7777T6dOnJUmXX365ysrK1LVrV7333nv69NNP9d///d+aOnWq3nnnHf3444+KiopSSEiIYmNjvTxy+CKCbiCAnDp1SmazWXl5eZIkm82miooK9e7dWy+88IJSUlK0ZMkSPfvss5Kk4cOHe3O4gMcEBQWpf//+Cg0NVVhYmPP24cOHKzg4WEePHlVCQoJsNptzSeGIESO8NVzA4xz/1ocPH67Y2FgVFxdr6dKlioiI0LXXXqvIyEhNnTrVefwFF1xQbbkt4G8GDBig7OxsrVy5UkFBQVqxYoXi4uKUm5urWbNmaeXKlUpLS1O7du3Uq1cv5gPqRSM1wM/l5+frxx9/1M8//6xWrVpp5syZeumll/Tee+/JZDI5t39JSkrSk08+qTfffFMHDhwQlSfwR/n5+dq9e7d++eUXpaSkaPbs2ZLk/PfuWELr6MxsMBi0detW7wwW8DDH98OePXuc/+ajoqJ0+vRpbd68WZK9k3lYWJhOnz6t7OxsZ5dmAgz4m6rzQZKGDh2q9PR0PfHEE4qKilJcXJwke/PA+++/Xxs3btR3330nifmAsyPoBvxYZmamRo8ereuvv179+vXTww8/rDFjxuiOO+7QxIkT9cEHH8hoNDq7bDqWCUZERPAFAr9Tcz48++yzslqtslqtMhgMqqio0KlTp2SxWBQeHi5Jmj17tkaOHKkjR454efSAe1WdD/3799eTTz7pXErbs2dPlZeX684779S//vUvZWVladq0aZowYYLWrFnj5ZED7lfz+2H+/Pnq37+/nnnmGWVkZOjXX3/Vr7/+6jw+NjZWI0eOVJs2bbw4arQkBN2An9q1a5cuueQS/cd//IdWrFihhQsXat68eTp27JhmzZqlSZMm6ZprrtHSpUuVm5urkpISbdq0SSEhIc56VsBfVJ0PK1eu1GOPPaY5c+bo999/r1a/7fhvs9msRx55RM8995y2bt2qdu3aeXP4gFvV/H5wzAfHlkiDBw/WlVdeqZUrV2r16tXq0qWLFi9erHvuuYeuzPA7NefDggULNH/+fB04cECTJk3SY489pp07d2r+/Pn64osvlJeXp8WLF+vQoUPq0KGDt4ePFoLu5YAfOnr0qK699loNHjxYixcvlmRfPjt27FjNnz9f4eHhKikp0bfffqu77rpLHTt2VGRkpHJycrRu3ToNHjzYuy8AcKO65sMVV1yhOXPmKCwsTLGxserUqZNKS0s1dOhQJSQkaNOmTdq8ebOz8SDgD
+qbDw899JBatWqlnTt3KiMjQxMnTtTAgQNlsVhkMpm8O3DAA+o7X5o7d67Cw8MVExOjrKws3XbbbaqoqFCbNm1UXFys999/n/MlNBiN1AA/ZDAYlJqaqv/6r/9y3vboo4/q448/Vk5OjgoKCpSUlKRFixZp586d+uGHH2Sz2XThhReqa9euXhw54H51zYd169YpNzdXR48eVd++fTV79mz16dNHu3bt0i+//KJvvvmGrB78Tn3zIScnRydOnNCAAQM0bdo0DRw4UJIIuOG36jtfys3NVX5+vnr37q2lS5dq+/btys7OVllZmXr27KmEhAQvjhwtDZluwE+dPHlSkZGRkqQVK1Zo4sSJWrFihUaPHq2MjAzde++9uuKKKzR//nwvjxTwvPrmQ2ZmpnM+zJs3T4sXL9bll1+upKQkL48a8Iz65sMPP/ygWbNm6YorrtDcuXO9PFLA8852vnTPPffoyiuv5HwJTULQDQSA/fv369ixYxoyZIjztnHjxslgMGjt2rVeHBnQ/OqaD0ajUWvWrJHVaqWvAQLG2eYDEEg4X4KnsLwcCABdu3Z1Lhu3Wq0qKytTq1atWDqLgFTXfOjfv78kEXAjoPD9ALgwH+ApnFkAAcZoNGrBggXasmWLrrvuOm8PB/CqqvPh+uuv9/ZwAK/i+wFwYT7Anch0AwHk3Xff1eeff64VK1Zo/fr16tmzp7eHBHgN8wFwYT4ALswHuBuZbiCAJCUl6ciRI/riiy/Y5gIBj/kAuDAfABfmA9yNRmpAgCkvL1dwcLC3hwH4BOYD4MJ8AFyYD3Angm4AAAAAADyE5eUAAAAAAHgIQTcAAAAAAB5C0A0AAAAAgIcQdAMAAAAA4CEE3QAAAAAAeAhBNwAAAAAAHkLQDQAAAACAhxB0AwAQwG655RYZDAYZDAYFBwcrLi5OY8aM0WuvvSar1drg53n99dcVExPjuYECANBCEXQDABDgUlNTlZOTo3379umjjz7SpZdeqjvvvFPjxo1TRUWFt4cHAECLRtANAECAM5vNio+PV8eOHTVkyBDNnj1bq1ev1kcffaTXX39dkrRo0SL1799fERER6ty5s2bMmKFTp05Jkj777DNNnjxZJ06ccGbN582bJ0kqLS3Vvffeq44dOyoiIkIjRozQZ5995p0XCgCAFxB0AwCAM1x22WUaOHCg3nvvPUmS0WjUs88+q6ysLP3973/Xxo0bdd9990mSkpOTtXjxYkVFRSknJ0c5OTm69957JUl33HGHtmzZohUrVmjnzp267rrrlJqaqj179njttQEA0JwMNpvN5u1BAAAA77jllltUUFCgVatWnXHfH/7wB+3cuVO7du06477/+7//07Rp03T06FFJ9pruu+66SwUFBc5jDhw4oO7du+vAgQPq0KGD8/bRo0dr+PDhWrBggdtfDwAAvibI2wMAAAC+yWazyWAwSJI++eQTLVy4UD/++KMKCwtVUVGhkpISFRcXKzw8vNbHZ2RkyGKx6Pzzz692e2lpqdq2bevx8QMA4AsIugEAQK12796txMRE7du3T+PGjdP06dP12GOPqU2bNvryyy916623qqysrM6g+9SpUzKZTNq+fbtMJlO1+1q1atUcLwEAAK8j6AYAAGfYuHGjMjIydPfdd2v79u2yWq16+umnZTTa28H84x//qHZ8SEiILBZLtdsGDx4si8Wiw4cP66KLLmq2sQMA4EsIugEACHClpaXKzc2VxWJRXl6e0tPTtXDhQo0bN06TJk1SZmamysvL9dxzz2n8+PH66quvtHTp0mrP0a1bN506dUobNmzQwIEDFR4ervPPP1833nijJk2apKefflqDBw/WkSNHtGHDBg0YMEBXXnmll14xAADNh+7lAAAEuPT0dCUkJKhbt25KTU3Vp59+qmeffVarV6+WyWTSwIEDtWjRIj3xxBPq16+f3nrrLS1cuLDacyQnJ2vatGm64YYb1K5dOz355JOSpGXLlmnSpEm655571
KtXL6Wlpembb75Rly5dvPFSAQBodnQvBwAAAADAQ8h0AwAAAADgIQTdAAAAAAB4CEE3AAAAAAAeQtANAAAAAICHEHQDAAAAAOAhBN0AAAAAAHgIQTcAAAAAAB5C0A0AAAAAgIcQdAMAAAAA4CEE3QAAAAAAeAhBNwAAAAAAHkLQDQAAAACAh/x/TLQWdPZO4eYAAAAASUVORK5CYII=", + "text/plain": [ + "

" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "benchmark_name = \"neighbors.KNeighborsClassifierBenchmark.time_fit\"\n", + "bm1 = all_benchmarks_meas1.summaries.loc[all_benchmarks_meas1.summaries[\"benchmark\"] == benchmark_name].copy()\n", + "bm1[\"sha\"] = bm1.revision.astype(str).map(all_benchmarks_meas1.index_data[\"revision_to_hash\"])\n", + "bm2 = all_benchmarks_meas2.summaries.loc[all_benchmarks_meas2.summaries[\"benchmark\"] == benchmark_name].copy()\n", + "bm2[\"sha\"] = bm2.revision.astype(str).map(all_benchmarks_meas2.index_data[\"revision_to_hash\"])\n", + "\n", + "# plot the revision on the x axis and the time on the y axis\n", + "# make it a line plot with markers\n", + "\n", + "plt.figure(figsize=(10, 5))\n", + "plt.plot(bm1[\"revision\"], bm1[\"time\"], marker=\"o\", linestyle=\"-\", label=\"Measurement 1\")\n", + "plt.plot(bm2[\"revision\"], bm2[\"time\"], marker=\"o\", linestyle=\"-\", label=\"Measurement 2\")\n", + "plt.xlabel(\"Date\")\n", + "plt.ylabel(\"Time (seconds)\")\n", + "plt.title(\"Benchmark Time Over Revisions\")\n", + "plt.xticks(rotation=45)\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "d2e7b36c", + "metadata": {}, + "outputs": [], + "source": [ + "# each hash_info object has a `sha` and a `date`. 
Plot the\n", + "import json\n", + "\n", + "import pandas as pd\n", + "\n", + "all_commits_pth = \"artifacts/raw/commits_all.jsonl\"\n", + "filtered_commits_pth = \"artifacts/raw/commits_filtered.jsonl\"\n", + "with open(all_commits_pth) as fp:\n", + " all_commits = [json.loads(line.strip().replace(\"'\", '\"')) for line in fp]\n", + "\n", + "with open(filtered_commits_pth) as fp:\n", + " filtered_commits = [json.loads(line.strip()) for line in fp]\n", + "\n", + "all_commits = pd.DataFrame(all_commits)\n", + "filtered_commits = pd.DataFrame(filtered_commits)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "3c616f16", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['idx', 'commit_id', 'repo_name', 'commit_sha'], dtype='object')" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "all_commits.columns" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "1b05b3b9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['idx', 'commit_id', 'repo_name', 'commit_sha', 'url', 'is_accessible',\n", + " 'is_fork', 'is_archived', 'fork_parent', 'forked_at', 'watchers',\n", + " 'stars', 'asv_conf_path', 'sha', 'date', 'message', 'total_additions',\n", + " 'total_deletions', 'total_files_changed', 'files_changed'],\n", + " dtype='object')" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "filtered_commits.columns" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0c41fb19", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of commits in intersection: 35\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " 0%| | 0/35 [00:00here for more info. \n", + "\u001b[1;31mView Jupyter log for further details." 
+ ] + } + ], + "source": [ + "# let's make a new jsonl file with just info about these intersection_hashes\n", + "from tqdm import tqdm\n", + "\n", + "from datasmith.execution.utils import _get_commit_info, find_file_in_tree\n", + "\n", + "out_path = \"artifacts/raw/sklearn_commits_filtered_intersection.jsonl\"\n", + "repo_name = \"scikit-learn/scikit-learn\"\n", + "asv_conf_path = find_file_in_tree(repo_name, \"asv.conf.json\")\n", + "# main_df = filtered_commits.copy()\n", + "main_df = all_commits.copy()\n", + "\n", + "intersection_hashes = set(main_df[\"commit_sha\"]).intersection(set(bm1[\"sha\"]))\n", + "subset_commits = main_df[main_df[\"commit_sha\"].isin(intersection_hashes)]\n", + "\n", + "print(f\"Number of commits in intersection: {len(intersection_hashes)}\")\n", + "metadata = []\n", + "for sha in tqdm(intersection_hashes):\n", + " commit_info = _get_commit_info(repo_name, sha)\n", + " commit_meta = pd.json_normalize(commit_info)\n", + " # commit_meta has additional info for ONE row of subset_commits\n", + " metadata.append(commit_meta)\n", + "\n", + "metadata = pd.concat(metadata)\n", + "# only merge metadata if the columns don't already exist in subset_commits\n", + "if not any(col in subset_commits.columns for col in metadata.columns):\n", + " subset_commits = subset_commits.merge(\n", + " metadata, how=\"left\", left_on=\"commit_sha\", right_on=\"sha\", suffixes=(\"\", \"_meta\")\n", + " )\n", + " subset_commits[\"asv_conf_path\"] = [asv_conf_path] * len(subset_commits)\n", + "\n", + "subset_commits.to_json(out_path, orient=\"records\", lines=True, index=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "64c53b9f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(list(intersection_hashes))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": 
"python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/scripts/benchmark_commits.py b/scripts/benchmark_commits.py index 276d4c4..e5d6c3b 100644 --- a/scripts/benchmark_commits.py +++ b/scripts/benchmark_commits.py @@ -2,13 +2,14 @@ import argparse import asyncio +import json import math import os from pathlib import Path import pandas as pd -from src.datasmith.docker.orchestrator import ( +from datasmith.docker.orchestrator import ( ensure_image, get_docker_client, orchestrate, @@ -56,6 +57,11 @@ def parse_args() -> argparse.Namespace: default=Path("src/datasmith/docker"), help="Directory containing the Dockerfile and other necessary files for building the ASV image.", ) + parser.add_argument( + "--dep-recs", + type=Path, + help="An optional json file with recommended dependencies for each package. 
passed as pip install before installation.", + ) return parser.parse_args() @@ -63,10 +69,23 @@ def main() -> None: args = parse_args() commits = pd.read_json(args.filtered_commits, lines=True) + commits["repo_name"] = commits["repo_name"].str.lower() + + if not args.dep_recs.exists(): + recommended_deps = {} + else: + with open(args.dep_recs) as f: + recommended_deps = json.load(f) + + commits["dep_recs"] = "" + for query, custom_deps in recommended_deps.items(): + valid_idxs = commits.query(query).index + commits.loc[valid_idxs, "dep_recs"] = [custom_deps] * len(valid_idxs) repo_urls = ("https://www.github.com/" + commits["repo_name"]).tolist() commit_shas = commits["commit_sha"].tolist() asv_conf_paths = [paths[0] for paths in commits["asv_conf_path"].tolist()] + dependency_recs = commits["dep_recs"].tolist() # if repo_name is scikit-learn/scikit-learn -> docker container name is `asv-scikit-learn-scikit-learn` docker_image_names = [f"asv-{repo_url.split('/')[-2]}-{repo_url.split('/')[-1]}" for repo_url in repo_urls] max_concurrency = ( @@ -76,7 +95,7 @@ def main() -> None: args.num_cores = max(1, args.num_cores) # Ensure at least 1 core is used - if args.num_cores * max_concurrency > os.cpu_count(): + if args.num_cores * max_concurrency > (os.cpu_count() or 1): raise ValueError() n_cores = args.num_cores @@ -92,7 +111,7 @@ def main() -> None: visited = set() for image_name, repo_url in zip(docker_image_names, repo_urls): if image_name not in visited: - ensure_image(client, image_name, repo_url, docker_dir=args.docker_dir) + ensure_image(client, image_name, repo_url, docker_dir=str(args.docker_dir)) visited.add(image_name) asyncio.run( @@ -101,6 +120,7 @@ def main() -> None: asv_conf_paths=asv_conf_paths, docker_image_names=docker_image_names, asv_args=asv_args, + recommended_deps=dependency_recs, max_concurrency=max_concurrency, n_cores=n_cores, output_dir=args.output_dir.absolute(), diff --git a/scripts/collect_commits.py b/scripts/collect_commits.py index 
d5116b9..741dc48 100644 --- a/scripts/collect_commits.py +++ b/scripts/collect_commits.py @@ -2,7 +2,8 @@ import pandas as pd -from datasmith.execution.collect_commits import search_commits +# from datasmith.execution.collect_commits import search_commits +from datasmith.execution.collect_commits_offline import search_commits from datasmith.logging_config import configure_logging # Configure logging for the script diff --git a/scripts/detect_breakpoints.py b/scripts/detect_breakpoints.py index daad72a..a4f4be1 100644 --- a/scripts/detect_breakpoints.py +++ b/scripts/detect_breakpoints.py @@ -1,4 +1,5 @@ import argparse +import json from pathlib import Path from datasmith.benchmark.collection import BenchmarkCollection @@ -42,6 +43,15 @@ def parse_args() -> argparse.Namespace: metavar="PAT", help="Restrict coverage queries to files whose paths contain PAT (repeatable).", ) + parser.add_argument( + "--commit-urls-location", + type=Path, + default=None, + help=( + "Path to a JSON file containing default commit URLs when show_commit_url is '#'. " + "If not provided, the script will not resolve '#' commit URLs and throw an error." 
+ ), + ) parser.add_argument( "--method", @@ -64,6 +74,12 @@ def parse_args() -> argparse.Namespace: def main() -> None: # pragma: no cover - CLI glue args = parse_args() + if args.commit_urls_location is not None: + with open(args.commit_urls_location) as f: + commit_urls_dict = json.load(f) + else: + commit_urls_dict = None + dataset_path = args.dataset.expanduser().resolve() collection = BenchmarkCollection.load(dataset_path) summary_df = collection.summaries @@ -75,19 +91,19 @@ def main() -> None: # pragma: no cover - CLI glue coverage_df = generate_coverage_dataframe( breakpoints, index_data=collection.index_data, + commit_urls=commit_urls_dict, only=args.only, ) collection.coverage = coverage_df - - if args.build_reports and args.compute_coverage: - logger.info("Building GitHub commit reports and merged dataframe ...") - new_breakpoints_df, comments_df = breakpoints_scrape_comments( - breakpoints_df=breakpoints, - coverage_df=coverage_df, - index_data=collection.index_data, - ) - collection.comments = comments_df - collection.enriched_breakpoints = new_breakpoints_df + if args.build_reports: + logger.info("Building GitHub commit reports and merged dataframe ...") + new_breakpoints_df, comments_df = breakpoints_scrape_comments( + breakpoints_df=breakpoints, + coverage_df=coverage_df, + index_data=collection.index_data, + ) + collection.comments = comments_df + collection.enriched_breakpoints = new_breakpoints_df # Save the collection. 
collection.save(dataset_path.parent / "breakpoints.fc.pkl") diff --git a/scripts/download_dataset.py b/scripts/download_dataset.py index a417600..36ddee8 100644 --- a/scripts/download_dataset.py +++ b/scripts/download_dataset.py @@ -1,4 +1,5 @@ import argparse +from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path import pandas as pd @@ -16,29 +17,66 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--dashboards", type=str, + required=True, help="A JSON Lines file containing benchmark urls and corresponding output directories", ) parser.add_argument("--force", action="store_true", help="Force re-download of files.") + parser.add_argument( + "--num_workers", + type=int, + default=0, + help="Number of workers to use for parallel downloading (0 = sequential)", + ) return parser.parse_args() +def process_dashboard(row, force: bool) -> tuple[Path, BenchmarkCollection]: + out_path = Path(row["output_dir"]) / "dashboard.fc.pkl" + dashboard_collection: BenchmarkCollection | None = make_benchmark_from_html( + base_url=row["url"], html_dir=row["output_dir"], force=force + ) + if dashboard_collection is None: + raise ValueError(f"Failed to create benchmark collection from {row['url']}") # noqa: TRY003 + dashboard_collection.save(path=out_path) + return out_path, dashboard_collection + + +def log_dashboard_results(row, out_path: Path, dashboard_collection: BenchmarkCollection) -> None: + logger.info( + "Saved %s benchmark rows and %s summary rows -> %s", + f"{len(dashboard_collection.benchmarks):,}", + f"{len(dashboard_collection.summaries):,}", + out_path, + ) + logger.info("Data downloaded to %s", row["output_dir"]) + + +def run_sequential(dashboards: pd.DataFrame, force: bool) -> None: + for _, row in dashboards.iterrows(): + out_path, dashboard_collection = process_dashboard(row, force) + log_dashboard_results(row, out_path, dashboard_collection) + + +def run_parallel(dashboards: pd.DataFrame, force: bool, num_workers: int) 
-> None: + with ThreadPoolExecutor(max_workers=num_workers) as executor: + futures = {executor.submit(process_dashboard, row, force): row for _, row in dashboards.iterrows()} + for future in as_completed(futures): + row = futures[future] + try: + out_path, dashboard_collection = future.result() + log_dashboard_results(row, out_path, dashboard_collection) + except Exception: + logger.exception("Failed to process %s", row["url"]) + + def main() -> None: args = parse_args() dashboards = pd.read_json(args.dashboards, lines=True) - for _, row in dashboards.iterrows(): - out_path = Path(row["output_dir"]) / "dashboard.fc.pkl" - dashboard_collection: BenchmarkCollection = make_benchmark_from_html( - base_url=row["url"], html_dir=row["output_dir"] - ) - dashboard_collection.save(path=out_path) - logger.info( - "Saved %s benchmark rows and %s summary rows -> %s", - f"{len(dashboard_collection.benchmarks):,}", - f"{len(dashboard_collection.summaries):,}", - out_path, - ) - logger.info("Data downloaded to %s", row["output_dir"]) + if args.num_workers == 0: + run_sequential(dashboards, args.force) + else: + run_parallel(dashboards, args.force, args.num_workers) if __name__ == "__main__": diff --git a/scripts/filter_commits.py b/scripts/filter_commits.py index 790fa68..443916a 100644 --- a/scripts/filter_commits.py +++ b/scripts/filter_commits.py @@ -3,13 +3,15 @@ import argparse import json import re +import tempfile from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from pathlib import Path import pandas as pd +from git import Repo from tqdm.auto import tqdm -from datasmith.execution.utils import _get_commit_info, find_file_in_tree +from datasmith.execution.utils import _get_commit_info_offline, find_file_in_tree from datasmith.logging_config import configure_logging # Configure logging for the script @@ -37,10 +39,11 @@ def _asv_conf_worker(repo_name: str) -> str | None: return find_file_in_tree(repo_name, "asv.conf.json") -def 
_commit_info_worker(arg_tuple) -> dict | None: +def _commit_info_worker(arg_tuple: tuple[Repo, str]) -> dict | None: """Wrapper for ProcessPool: arg_tuple = (repo_name, sha).""" repo, sha = arg_tuple - return _get_commit_info(repo, sha) + # return _get_commit_info(repo, sha) + return _get_commit_info_offline(repo, sha) NON_CORE_PATTERNS = re.compile( @@ -107,19 +110,48 @@ def main() -> None: commits = commits.merge(benchmarks, how="right", on="repo_name") commits = commits.dropna(subset=["commit_sha"]) - with ProcessPoolExecutor(max_workers=args.procs) as pp: - commits["commit_info"] = list( - tqdm( - pp.map(_commit_info_worker, commits[["repo_name", "commit_sha"]].itertuples(index=False, name=None)), - total=len(commits), - desc="Fetching commit metadata", + all_repo_names = set(commits["repo_name"]) + import IPython + + IPython.embed( + header="I need to figure out why the filter function is taking out commits that seem to have gt info available" + ) # For debugging purposes, remove in production + + # download all repos to a temp dir + with tempfile.TemporaryDirectory(prefix="gh-repos-") as td: + all_repos = {} + for repo_name in tqdm(all_repo_names, desc="Cloning repos"): + repo_name = repo_name.strip("/") + owner, name = repo_name.split("/", 1) + path = Path(td) / f"{owner}__{name}.git" + repo = Repo.clone_from( + f"https://github.com/{repo_name}.git", + path, + bare=True, + # multi_options=["--filter=tree:0"], + multi_options=["--filter=blob:none"], + quiet=True, + ) + all_repos[repo_name] = repo + + commit_info_args: list[tuple[Repo, str]] = [] + for repo_name, commit_sha in commits[["repo_name", "commit_sha"]].itertuples(index=False, name=None): + repo = all_repos[repo_name] + commit_info_args.append((repo, commit_sha)) + + with ProcessPoolExecutor(max_workers=args.procs) as pp: + commits["commit_info"] = list( + tqdm( + pp.map(_commit_info_worker, commit_info_args), + total=len(commits), + desc="Fetching commit metadata", + ) ) - ) - commit_meta = 
pd.json_normalize(commits.pop("commit_info")) - commits = pd.concat([commits, commit_meta], axis=1) - commits = commits.dropna(subset=["asv_conf_path", "sha", "date", "message"]) - commits = commits[commits["files_changed"].apply(has_core_file)].reset_index(drop=True) + commit_meta = pd.json_normalize(commits.pop("commit_info")) + commits = pd.concat([commits, commit_meta], axis=1) + commits = commits.dropna(subset=["asv_conf_path", "sha", "date", "message"]) + commits = commits[commits["files_changed"].apply(has_core_file)].reset_index(drop=True) out_path = Path(args.output_pth) if not out_path.parent.exists(): @@ -127,7 +159,7 @@ def main() -> None: # commits.to_csv(out_path, index=False) commits.to_json(out_path, orient="records", lines=True, index=False) - logger.info(f"✔ Wrote {len(commits):,} rows → {out_path}") + logger.info("✔ Wrote %s rows → %s", len(commits), out_path) if __name__ == "__main__": diff --git a/scripts/scrape_repositories.py b/scripts/scrape_repositories.py index 615d7a7..444c30c 100644 --- a/scripts/scrape_repositories.py +++ b/scripts/scrape_repositories.py @@ -56,6 +56,7 @@ def parse_args() -> argparse.Namespace: default=0.3, help="Random extra delay (0-JITTER's) after each call", ) + p.add_argument("--min-stars", type=int, default=500, help="Minimum number of stars to consider a repository") return p.parse_args() @@ -83,6 +84,7 @@ def main() -> None: filtered_df = filter_dashboards(df, url_col="url") # remove airspeed-velocity/asv filtered_df = filtered_df[filtered_df.repo_name != "airspeed-velocity/asv"] + filtered_df = filtered_df[filtered_df.stars >= args.min_stars] if filtered_df.empty: raise ValueError("No dashboards found in the repositories.") # noqa: TRY003 diff --git a/src/datasmith/collation/collate_benchmark_results.py b/src/datasmith/collation/collate_benchmark_results.py index 74a827f..65ceeb0 100644 --- a/src/datasmith/collation/collate_benchmark_results.py +++ b/src/datasmith/collation/collate_benchmark_results.py @@ 
-111,9 +111,15 @@ def aggregate_benchmark_runs( benchmarks_path = commit_pth / "benchmarks.json" asv_conf_path = commit_pth.parent / "asv.conf.json" - if benchmarks_path.exists() and asv_conf_path.exists(): - _update_json(benchmarks_path, repo_out_dir / "benchmarks.json") - _update_json(asv_conf_path, repo_out_dir / "asv.conf.json") + if not benchmarks_path.exists() or not asv_conf_path.exists(): + logger.warning( + "Skipping commit %s (%s) because benchmarks.json or asv.conf.json is missing.", + commit_id, + commit_metadata.get("repo_name", "unknown"), + ) + continue + _update_json(benchmarks_path, repo_out_dir / "benchmarks.json") + _update_json(asv_conf_path, repo_out_dir / "asv.conf.json") n_runids = 0 machine_data = None for runid in commit_pth.iterdir(): diff --git a/src/datasmith/detection/detect_breakpoints.py b/src/datasmith/detection/detect_breakpoints.py index defc100..e0c2cb1 100644 --- a/src/datasmith/detection/detect_breakpoints.py +++ b/src/datasmith/detection/detect_breakpoints.py @@ -48,8 +48,8 @@ def get_breakpoints_asv(df: pd.DataFrame) -> list[dict] | None: logger.warning("Robustness of the detection may be reduced.") y_sigma = None - _, _, regression_pos = asv.step_detect.detect_regressions( - asv.step_detect.detect_steps( + _, _, regression_pos = asv.step_detect.detect_regressions( # pyright: ignore[reportAttributeAccessIssue] + asv.step_detect.detect_steps( # pyright: ignore[reportAttributeAccessIssue] y=-1 * y, w=y_sigma, ) diff --git a/src/datasmith/docker/Dockerfile b/src/datasmith/docker/Dockerfile index d693b14..bf69225 100644 --- a/src/datasmith/docker/Dockerfile +++ b/src/datasmith/docker/Dockerfile @@ -1,5 +1,8 @@ FROM buildpack-deps:jammy +RUN ["apt-get", "update"] +RUN ["apt-get", "install", "-y", "vim"] +RUN ["apt-get", "install", "-y", "curl", "git", "build-essential"] ARG REPO_URL RUN curl -Ls "https://micromamba.snakepit.net/api/micromamba/linux-64/latest" \ diff --git a/src/datasmith/docker/entrypoint.sh 
b/src/datasmith/docker/entrypoint.sh index 145f844..6c0b854 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -4,16 +4,23 @@ : "${COMMIT_SHA:?Need to set COMMIT_SHA}" : "${ASV_ARGS:?Need to set ASV_ARGS}" : "${ASV_CONF_PATH:?Need to set ASV_CONF_PATH}" +: "${RECOMMENDED_DEPS:?Need to set RECOMMENDED_DEPS}" # 0) Hook in micromamba and activate `base` eval "$(micromamba shell hook --shell=bash)" micromamba activate base +# # COMMIT_SHA=0c65bbfe8ce816a181780d2a249c94dd653e115a +# # COMMIT_SHA=ee5d94e0a05da11272a4af1cd731f9822565048e + +# pip install pipenv +# pipenv install pyproject.toml # 0.5) Tune the container so all CPUs stay at fixed frequency. # This requires root; Docker runs as root by default. # python -m pyperf system tune || true git checkout --quiet "${COMMIT_SHA}" +ROOT_PATH=${PWD} # 2) cd into the folder containing the asv.conf.json cd "$(dirname "$ASV_CONF_PATH")" @@ -35,7 +42,7 @@ python_versions=$(python -c "import asv; pythons = asv.config.Config.load('asv.c for version in $python_versions; do # Create per‑Python env and install ASV python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/' '\"$version\"') +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') path.mkdir(parents=True, exist_ok=True) config = asv.config.Config.load('asv.conf.json') @@ -46,15 +53,16 @@ asv.util.write_json('asv.conf.json', config.__dict__, api_version=1) asv.util.write_json(path / 'asv.conf.json', config.__dict__, api_version=1) " - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" \ - git \ - pyperf \ - libmambapy \ - mamba \ - conda - micromamba run -n "asv_${version}" pip install asv - micromamba run asv machine --yes - micromamba run asv run --show-stderr "$COMMIT_SHA^!" 
${ASV_ARGS} + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + # micromamba run -n "asv_${version}" pip install asv + # micromamba run -n "asv_${version}" pip install -e "${ROOT_PATH}" + # if [ -n "$RECOMMENDED_DEPS" ]; then + # # skip command if RECOMMENDED_DEPS="" + # micromamba run -n "asv_${version}" pip install "${RECOMMENDED_DEPS}" + # fi + micromamba run -n "asv_${version}" asv machine --yes + micromamba run -n "asv_${version}" asv run --show-stder ${ASV_ARGS} done echo "Benchmarks complete." diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 1696de2..7218bb1 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -51,6 +51,7 @@ async def run_container( cores: str | Sequence[int], commit_sha: str, asv_conf_path: str, + recommended_deps: str, image: str, asv_args: str, output_dir: Path, @@ -70,6 +71,7 @@ async def run_container( "ASV_CONF_PATH": asv_conf_path, # asv can take a comma-separated list for --cpu-affinity "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores}", + "RECOMMENDED_DEPS": recommended_deps, } def _launch() -> int: @@ -78,11 +80,12 @@ def _launch() -> int: # Log the exact command a human could copy-paste logger.info( - "$ docker run --rm --name %s -e COMMIT_SHA=%s -e ASV_CONF_PATH=%s -e ASV_ARGS='%s' --cpuset-cpus %s %s", + "$ docker run --rm --name %s -e COMMIT_SHA=%s -e ASV_CONF_PATH=%s -e ASV_ARGS='%s' -e RECOMMENDED_DEPS='%s' --cpuset-cpus %s %s", container_name, commit_sha, asv_conf_path, env["ASV_ARGS"], + env["RECOMMENDED_DEPS"], cpuset, image, ) @@ -117,6 +120,7 @@ def _launch() -> int: async def orchestrate( commit_shas: Sequence[str], asv_conf_paths: Sequence[str], + recommended_deps: Sequence[str], docker_image_names: Sequence[str], asv_args: str, max_concurrency: int, @@ 
-137,7 +141,7 @@ async def orchestrate( for s in core_sets: core_pool.put_nowait(s) - async def worker(idx: int, commit_sha: str, asv_conf_path: str, image: str) -> int: + async def worker(idx: int, commit_sha: str, asv_conf_path: str, recommended_deps: str, image: str) -> int: core_set = await core_pool.get() # blocks until a free set exists cpuset_str = ",".join(map(str, core_set)) # "0,1,2,3" @@ -149,6 +153,7 @@ async def worker(idx: int, commit_sha: str, asv_conf_path: str, image: str) -> i cores=cpuset_str, commit_sha=commit_sha, asv_conf_path=asv_conf_path, + recommended_deps=recommended_deps, image=image, asv_args=asv_args, output_dir=output_dir, @@ -161,8 +166,10 @@ async def worker(idx: int, commit_sha: str, asv_conf_path: str, image: str) -> i core_pool.put_nowait(core_set) tasks = [ - asyncio.create_task(worker(i, sha, conf, img)) - for i, (sha, conf, img) in enumerate(zip(commit_shas, asv_conf_paths, docker_image_names)) + asyncio.create_task(worker(i, sha, conf, rec_deps, img)) + for i, (sha, conf, rec_deps, img) in enumerate( + zip(commit_shas, asv_conf_paths, recommended_deps, docker_image_names) + ) ] results = await asyncio.gather(*tasks) diff --git a/src/datasmith/execution/collect_commits_offline.py b/src/datasmith/execution/collect_commits_offline.py new file mode 100644 index 0000000..8593a9c --- /dev/null +++ b/src/datasmith/execution/collect_commits_offline.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +import os +import re +import tempfile +import urllib.parse +from pathlib import Path + +from git import GitCommandError, Repo + +from datasmith import logger +from datasmith.utils import CACHE_LOCATION, _get_github_metadata, cache_completion + +_PR_MERGE_PATTERNS: tuple[re.Pattern[str], ...] = ( + # standard "Merge pull request #123 ..." + re.compile(r"Merge pull request #(\d+)\b"), + # squash-merge style "... 
(#[0-9]+)" on the last line + re.compile(r"\(#(\d+)\)"), +) + + +def _default_branch(repo: Repo) -> str: + """ + Resolve the remote's default branch (origin/HEAD -> "main" / "master" / ...). + """ + try: + # “origin/main” + full_ref: str = repo.git.symbolic_ref("--quiet", "--short", "refs/remotes/origin/HEAD") + return full_ref.split("/", 1)[1] # keep text after "origin/" + except Exception: + # Fallback if symbolic-ref is missing (rare). + return repo.head.reference.name + + +def _is_pr_merge(message: str) -> bool: + """ + True iff *message* matches one of our PR-closing patterns. + """ + return any(p.search(message) for p in _PR_MERGE_PATTERNS) + + +def _is_public(repo_name: str) -> bool: + """ + Check if a repo is public. + """ + return _get_github_metadata(f"/repos/{repo_name}") is not None + + +@cache_completion(CACHE_LOCATION, "search_commits_offline") +def search_commits( + repo_name: str, + query: str, + max_pages: int = 100, # ignored (kept for compatibility) + per_page: int = 100, # ignored (kept for compatibility) +) -> list[str]: + """ + Return a list of commit SHAs that closed pull requests, **without** + calling any GitHub API endpoints. Internally: + + • clones the repo (metadata-only) into a tmp dir + • walks the commit history + • selects commits whose message looks like a PR merge + + The only element of *query* we still honour is `base=`. + """ + qs = urllib.parse.parse_qs(query, keep_blank_values=True) + base_branch: str | None = qs.get("base", [None])[0] + + with tempfile.TemporaryDirectory(prefix="gh-history-") as workdir: + workdir_path = Path(workdir) + url = f"https://github.com/{repo_name}.git" + + # Clone *just* the commit / tree metadata (no blobs). 
+ clone_kwargs: dict = { + "multi_options": ["--filter=tree:0"], + "no_checkout": True, + } + if base_branch: + clone_kwargs["branch"] = base_branch + + # ignore if repo is not public + try: + repo = Repo.clone_from( + url, + workdir_path, + env={"GIT_TERMINAL_PROMPT": "0", **os.environ}, + **clone_kwargs, + ) + except GitCommandError as e: + if e.status == 128: + msg = e.stderr.strip() or "authentication failed or repository not found" + logger.warning("Cannot clone %s: %s", url, msg) + return [] + raise + + # Figure out which ref to walk. + branch = base_branch or _default_branch(repo) + ref_to_walk = f"origin/{branch}" + + merge_shas: set[str] = set() + for commit in repo.iter_commits(ref_to_walk): + if _is_pr_merge(str(commit.message)): + merge_shas.add(commit.hexsha) + + return sorted(merge_shas) diff --git a/src/datasmith/execution/utils.py b/src/datasmith/execution/utils.py index 4a57a47..acb0724 100644 --- a/src/datasmith/execution/utils.py +++ b/src/datasmith/execution/utils.py @@ -1,7 +1,10 @@ +from typing import Any + +from git import BadName, GitCommandError, Repo from requests.exceptions import HTTPError from datasmith.logging_config import get_logger -from datasmith.utils import _get_github_metadata +from datasmith.utils import CACHE_LOCATION, _get_github_metadata, cache_completion logger = get_logger("execution.utils") @@ -47,6 +50,48 @@ def _get_commit_info(repo_name: str, commit_sha: str) -> dict: } +@cache_completion(CACHE_LOCATION, "get_commit_info_offline") +def _get_commit_info_offline(repo: Repo, commit_sha: str) -> dict[str, Any]: + """ + Return commit metadata and diff stats *without* the GitHub REST API. + + The function creates a temporary **treeless** clone + (`git clone --filter=tree:0 …`) so it transfers only commit objects. + When we later call `commit.stats`, Git will lazily grab just the blobs + needed to compute line-level stats - still far cheaper than an API call. 
+ """ + try: + commit = repo.commit(commit_sha) + + except (BadName, ValueError): + logger.exception("Maybe commit not found: %s", commit_sha) + repo.git.fetch("--no-filter", "--quiet", "origin", commit_sha) + commit = repo.commit(commit_sha) # retry after fetching + except GitCommandError: + logger.exception("Error fetching commit info: %s", commit_sha) + return { + "sha": commit_sha, + "date": None, + "message": None, + "total_additions": 0, + "total_deletions": 0, + "total_files_changed": 0, + "files_changed": "", + } + + stats = commit.stats + + return { + "sha": commit.hexsha, + "date": commit.committed_datetime.isoformat(), + "message": commit.message, + "total_additions": stats.total["insertions"], + "total_deletions": stats.total["deletions"], + "total_files_changed": stats.total["files"], + "files_changed": "\n".join(str(k) for k in stats.files), + } + + def find_file_in_tree(repo: str, filename: str, branch: str | None = None) -> list[str] | None: if branch is None: repo_info = _get_github_metadata(endpoint=f"/repos/{repo}") diff --git a/src/datasmith/logging_config.py b/src/datasmith/logging_config.py index 3f72878..d5ecbb3 100644 --- a/src/datasmith/logging_config.py +++ b/src/datasmith/logging_config.py @@ -8,14 +8,14 @@ import logging import sys -from typing import Optional +from typing import Optional, TextIO def configure_logging( level: int = logging.INFO, format_string: Optional[str] = None, date_format: str = "%H:%M:%S", - stream: Optional[object] = None, + stream: Optional[TextIO] = None, ) -> logging.Logger: """ Configure logging for the datasmith package. 
diff --git a/src/datasmith/scrape/code_coverage.py b/src/datasmith/scrape/code_coverage.py index 82b0240..ba7039c 100644 --- a/src/datasmith/scrape/code_coverage.py +++ b/src/datasmith/scrape/code_coverage.py @@ -66,11 +66,18 @@ def generate_coverage_dataframe( breakpoints_df: pd.DataFrame, index_data: dict[str, typing.Any], *, + commit_urls: dict[str, str] | None = None, only: list[str] | None = None, ) -> pd.DataFrame: """Retrieve per-file coverage numbers for **all** commits referenced.""" base = index_data["show_commit_url"].rstrip("/") + if base == "#" and (commit_urls is not None) and (index_data["project_url"] in commit_urls): + base = commit_urls[index_data["project_url"]] + elif base == "#": + raise ValueError( # noqa: TRY003 + f"Base URL '{base}' is not set and {index_data['project_url']} is not in commit_urls. Please provide a valid base URL." + ) # Include both ground-truth and observed hashes if present url_cols = [c for c in breakpoints_df.columns if c.endswith("hash")] @@ -83,7 +90,7 @@ def generate_coverage_dataframe( seen: set[str] = set() filtered = [] for typ, u in all_urls: - if u not in seen: + if u not in seen and len(u): seen.add(u) filtered.append((typ, u)) diff --git a/src/datasmith/scrape/scrape_dashboards.py b/src/datasmith/scrape/scrape_dashboards.py index 862ce22..75ce132 100644 --- a/src/datasmith/scrape/scrape_dashboards.py +++ b/src/datasmith/scrape/scrape_dashboards.py @@ -6,6 +6,7 @@ from pathlib import Path from typing import Callable +import asv # type: ignore[import-untyped] import pandas as pd from tqdm import tqdm @@ -16,15 +17,15 @@ logger = get_logger("scrape.scrape_dashboards") -def make_graph_dir(param_dict: dict, all_keys: list, *, quote: bool) -> str: - parts = [] - for k in all_keys: - v = param_dict.get(k) - seg = f"{k}-{v}" if v not in ("", None) else k - if quote: - seg = urllib.parse.quote(seg, safe="()-") - parts.append(seg) - return "graphs/" + "/".join(parts) + "/" +# def make_graph_dir(param_dict: dict, 
all_keys: list, *, quote: bool) -> str: +# parts = [] +# for k in all_keys: +# v = param_dict.get(k) +# seg = f"{k}-{v}" if v not in ("", None) else k +# if quote: +# seg = urllib.parse.quote(seg, safe="()-") +# parts.append(seg) +# return "graphs/" + "/".join(parts) + "/" def _make_joiner(base_url: str) -> Callable[..., str]: @@ -51,8 +52,8 @@ def make_benchmark_from_html(base_url: str, html_dir: str, force: bool) -> Bench Extract benchmark metrics from an asv dashboard located either online (http/https) *or* on the local filesystem. """ - parsed = urllib.parse.urlparse(base_url) - is_remote = bool(parsed.scheme) # http / https / file → True + # parsed = urllib.parse.urlparse(base_url) + # is_remote = bool(parsed.scheme) # http / https / file → True join_path = _make_joiner(base_url) html_dir = os.path.abspath(html_dir) @@ -74,10 +75,11 @@ def make_benchmark_from_html(base_url: str, html_dir: str, force: bool) -> Bench frames = [] for p in tqdm(param_sets, desc="machines"): - graph_dir = make_graph_dir(p, all_keys, quote=is_remote) + # graph_dir = make_graph_dir(p, all_keys, quote=is_remote) for bench in tqdm(benchmarks, desc="benchmarks", leave=False): - url = join_path(graph_dir, f"{bench}.json") - local = dl_and_open(url, html_dir, base=base_url, force=force) + bench_url = asv.graph.Graph.get_file_path(params=p, benchmark_name=f"{bench}.json") + full_url = join_path(base_url, bench_url) + local = dl_and_open(full_url, html_dir, base=base_url, force=force) if local is None: continue try: diff --git a/src/datasmith/scrape/utils.py b/src/datasmith/scrape/utils.py index 29c89e5..e11f2e2 100644 --- a/src/datasmith/scrape/utils.py +++ b/src/datasmith/scrape/utils.py @@ -7,7 +7,6 @@ import requests from datasmith.logging_config import get_logger -from datasmith.utils import cache_completion logger = get_logger("scrape.utils") @@ -85,7 +84,6 @@ def _parse_commit_url(url: str) -> tuple[str, str, str]: return owner, repo, sha.lower() -@cache_completion("debug.db", 
"dl_and_open") def dl_and_open(url: str, dl_dir: str, base: str | None = None, force: bool = False) -> str | None: """ Fetch *url* into *dl_dir* and return the local filename. @@ -98,21 +96,30 @@ def dl_and_open(url: str, dl_dir: str, base: str | None = None, force: bool = Fa is_http = parsed.scheme in ("http", "https") is_file = parsed.scheme == "file" + # ---- derive the URL-relative path, *without* any cleaning ---- rel_path = url[len(base) :].lstrip("/") if base and url.startswith(base) else parsed.path.lstrip("/") + raw_parts = [unquote(p) for p in Path(rel_path).parts] + raw_path = Path(dl_dir).joinpath(*raw_parts).resolve() - def clean_component(comp: str) -> str: - comp = unquote(comp) - comp = comp.replace(" ", "_").replace("@", "AT") - comp = comp.replace("(", "").replace(")", "") - return re.sub(r"[^A-Za-z0-9.\-_/]", "_", comp) + # ---- if that exact path already exists, use it as-is ---- + if raw_path.exists(): + local_path = raw_path + else: - clean_parts = [clean_component(p) for p in Path(rel_path).parts] - local_path = Path(dl_dir).joinpath(*clean_parts).resolve() + def clean_component(comp: str) -> str: + comp = unquote(comp) + comp = comp.replace(" ", "_").replace("@", "AT") + comp = comp.replace("(", "").replace(")", "") + return re.sub(r"[^A-Za-z0-9.\-_/]", "_", comp) + + clean_parts = [clean_component(p) for p in raw_parts] + local_path = Path(dl_dir).joinpath(*clean_parts).resolve() + + # make sure the destination directory exists local_path.parent.mkdir(parents=True, exist_ok=True) - src_path: Path + # ---- download/copy the file just like before ---- if is_http: - # Always (re-)download when force=True or target missing if force or not local_path.exists(): try: r = requests.get(url, timeout=20) @@ -124,13 +131,7 @@ def clean_component(comp: str) -> str: return None return str(local_path) - elif is_file: - src_path = Path(parsed.path) - - else: # plain local path - src_path = Path(url) - - # For file:// and plain local paths we just copy if 
necessary + src_path = Path(parsed.path) if is_file else Path(url) if not src_path.exists(): return None if force or not local_path.exists(): From 9597c84b60a864f77a5e4c02e86346723c296d45 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 28 Jul 2025 19:41:42 +0000 Subject: [PATCH 02/20] adding info upto now --- scripts/benchmark_commits.py | 4 +- scripts/validate_containers.py | 90 ++++++++++++++++++++++++ src/datasmith/docker/Dockerfile | 2 +- src/datasmith/docker/context.py | 84 ++++++++++++++++++++++ src/datasmith/docker/context_registry.py | 51 ++++++++++++++ src/datasmith/docker/entrypoint.sh | 43 ++++++++--- src/datasmith/docker/orchestrator.py | 2 +- 7 files changed, 261 insertions(+), 15 deletions(-) create mode 100644 scripts/validate_containers.py create mode 100644 src/datasmith/docker/context.py create mode 100644 src/datasmith/docker/context_registry.py diff --git a/scripts/benchmark_commits.py b/scripts/benchmark_commits.py index e5d6c3b..ef562f1 100644 --- a/scripts/benchmark_commits.py +++ b/scripts/benchmark_commits.py @@ -10,7 +10,7 @@ import pandas as pd from datasmith.docker.orchestrator import ( - ensure_image, + build_repo_image, get_docker_client, orchestrate, ) @@ -111,7 +111,7 @@ def main() -> None: visited = set() for image_name, repo_url in zip(docker_image_names, repo_urls): if image_name not in visited: - ensure_image(client, image_name, repo_url, docker_dir=str(args.docker_dir)) + build_repo_image(client, image_name, repo_url, docker_dir=str(args.docker_dir)) visited.add(image_name) asyncio.run( diff --git a/scripts/validate_containers.py b/scripts/validate_containers.py new file mode 100644 index 0000000..6f16d42 --- /dev/null +++ b/scripts/validate_containers.py @@ -0,0 +1,90 @@ +""" +This script builds and validates that each benchmark container can be compiled and will run asv successfully. 
+""" + +import argparse +from pathlib import Path + +from datasmith.benchmark.collection import BenchmarkCollection +from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.orchestrator import get_docker_client +from datasmith.logging_config import configure_logging +from datasmith.scrape.utils import _parse_commit_url + +# logger = configure_logging(stream=open(Path(__file__).with_suffix(".log"), "a")) +logger = configure_logging() + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + prog="validate_containers", + description="Validate that each benchmark container can be compiled and run with ASV.", + ) + + parser.add_argument( + "--dashboard", + type=Path, + required=True, + help="Path to the dashboard containing the benchmarks.", + ) + parser.add_argument( + "--docker-dir", + type=Path, + default=Path("src/datasmith/docker"), + help="Directory containing the Dockerfile and other necessary files for building the ASV image.", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=Path("output"), + help="Directory where the results will be stored.", + ) + return parser.parse_args() + + +def main(args: argparse.Namespace) -> None: + dashboard = BenchmarkCollection.load(args.dashboard) + all_states = {} + for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {sha} + else: + all_states[(owner, repo)].add(sha) + + client = get_docker_client() + + for (owner, repo), uniq_shas in all_states.items(): + image_name = f"asv-{owner}-{repo}" + docker_ctx = CONTEXT_REGISTRY[image_name] if image_name in CONTEXT_REGISTRY else CONTEXT_REGISTRY["default"] + + docker_ctx.build_container( + client=client, image_name=image_name, repo_url=f"https://www.github.com/{owner}/{repo}", force=True + ) + for sha in uniq_shas: + logger.debug(f"Validating {image_name} for commit {sha}") + container = client.containers.run( + 
image=image_name, + detach=True, + remove=True, + name=f"asv-{owner}-{repo}-{sha}", + environment={"COMMIT_SHA": sha, "ASV_ARGS": "--bench convolve*"}, + volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, + ) + for line in container.logs(stream=True, follow=True): + logger.info(line.decode().strip()) + + result = container.wait() + if result.get("StatusCode", 1) != 0: + logger.error( + f"Container {image_name} for commit {sha} failed with status code {result.get('StatusCode', 1)}" + ) + else: + logger.info(f"Container {image_name} for commit {sha} completed successfully.") + + logger.info("All containers validated successfully.") + + +if __name__ == "__main__": + args = parse_args() + + main(args) diff --git a/src/datasmith/docker/Dockerfile b/src/datasmith/docker/Dockerfile index bf69225..3013506 100644 --- a/src/datasmith/docker/Dockerfile +++ b/src/datasmith/docker/Dockerfile @@ -5,7 +5,7 @@ RUN ["apt-get", "install", "-y", "vim"] RUN ["apt-get", "install", "-y", "curl", "git", "build-essential"] ARG REPO_URL -RUN curl -Ls "https://micromamba.snakepit.net/api/micromamba/linux-64/latest" \ +RUN curl -Ls "https://micro.mamba.pm/api/micromamba/linux-64/latest" \ | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba ENV MAMBA_ROOT_PREFIX=/opt/conda \ diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py new file mode 100644 index 0000000..f6745d1 --- /dev/null +++ b/src/datasmith/docker/context.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import io +import sys +import tarfile +from copy import deepcopy +from pathlib import Path + +import docker +from docker.errors import DockerException, ImageNotFound + +from datasmith.logging_config import get_logger + +logger = get_logger("docker.context") + + +class DockerContext: + default_dockerfile_loc = Path(__file__).parent / "Dockerfile" + default_entrypoint_loc = Path(__file__).parent / "entrypoint.sh" + dockerfile_data: str + 
entrypoint_data: str + + def __init__(self, dockerfile_data: str | None = None, entrypoint_data: str | None = None): + if dockerfile_data is None: + dockerfile_data = self.default_dockerfile_loc.read_text() + if entrypoint_data is None: + entrypoint_data = self.default_entrypoint_loc.read_text() + + self.dockerfile_data = dockerfile_data + self.entrypoint_data = entrypoint_data + self.tarball_stream = self.build_tarball_stream() + + def build_tarball_stream(self) -> io.BytesIO: + tar_stream = io.BytesIO() + with tarfile.open(fileobj=tar_stream, mode="w") as tar: + # Add Dockerfile + dockerfile_bytes = self.dockerfile_data.encode("utf-8") + dockerfile_info = tarfile.TarInfo(name="Dockerfile") + dockerfile_info.size = len(dockerfile_bytes) + tar.addfile(dockerfile_info, io.BytesIO(dockerfile_bytes)) + + # Add entrypoint.sh + entrypoint_data = self.entrypoint_data.encode("utf-8") + entrypoint_info = tarfile.TarInfo(name="entrypoint.sh") + entrypoint_info.size = len(entrypoint_data) + entrypoint_info.mode = 0o755 # Make it executable + tar.addfile(entrypoint_info, io.BytesIO(entrypoint_data)) + + tar_stream.seek(0) + return deepcopy(tar_stream) + + def build_container(self, client: docker.DockerClient, image_name: str, repo_url: str, force: bool = False) -> None: + """Builds the Docker image if it does not exist or if force is True.""" + image_exists = False + try: + image = client.images.get(image_name) + image_exists = True + if force: + logger.info("Force rebuild requested. 
Removing existing Docker image '%s'.", image_name) + client.images.remove(image=image.id, force=True) + image_exists = False + else: + logger.info("Docker image '%s' found locally.", image_name) + except ImageNotFound: + pass # Image doesn't exist or was removed, proceed to build + + if not image_exists: + if repo_url: + logger.info("Building Docker image '%s' with REPO_URL=%s", image_name, repo_url) + try: + client.images.build( + fileobj=self.tarball_stream, + custom_context=True, + tag=image_name, + buildargs={"REPO_URL": repo_url}, + rm=True, + ) + except DockerException as exc: + sys.exit(f"Failed to build image {image_name}: {exc}") + else: + raise RuntimeError(f"Docker image '{image_name}' not found and no REPO_URL provided for build.") # noqa: TRY003 + + if not client.images.get(image_name): + raise RuntimeError(f"Image '{image_name}' failed to build and is not found.") # noqa: TRY003 diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py new file mode 100644 index 0000000..4ec9137 --- /dev/null +++ b/src/datasmith/docker/context_registry.py @@ -0,0 +1,51 @@ +from datasmith.docker.context import DockerContext + +DEFAULT_DOCKER_CONTEXT = DockerContext() + +ASTROPY_DOCKER_CONTEXT = DockerContext( + dockerfile_data=""" +FROM buildpack-deps:jammy + +RUN ["apt-get", "update"] +RUN ["apt-get", "install", "-y", "vim"] +RUN ["apt-get", "install", "-y", "curl", "git", "build-essential"] +ARG REPO_URL + +RUN curl -Ls "https://micro.mamba.pm/api/micromamba/linux-64/latest" \ + | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba + +ENV MAMBA_ROOT_PREFIX=/opt/conda \ + PATH=/opt/conda/bin:$PATH \ + MAMBA_DOCKERFILE_ACTIVATE=1 \ + OPENBLAS_NUM_THREADS=1 \ + MKL_NUM_THREADS=1 \ + OMP_NUM_THREADS=1 + +RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ + python=3.10 \ + git \ + asv \ + pyperf \ + libmambapy \ + mamba \ + conda \ + && micromamba clean --all --yes + +RUN mkdir /workspace /output +WORKDIR 
/workspace + +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +RUN git clone https://github.com/astropy/astropy /workspace/repo +WORKDIR /workspace/repo +RUN git clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch +ENTRYPOINT ["/entrypoint.sh"] +""".strip(), + entrypoint_data=DEFAULT_DOCKER_CONTEXT.entrypoint_data, +) + +CONTEXT_REGISTRY: dict[str, DockerContext] = { + "default": DEFAULT_DOCKER_CONTEXT, + "asv-astropy-astropy": ASTROPY_DOCKER_CONTEXT, +} diff --git a/src/datasmith/docker/entrypoint.sh b/src/datasmith/docker/entrypoint.sh index 6c0b854..03fd46e 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -3,14 +3,29 @@ : "${COMMIT_SHA:?Need to set COMMIT_SHA}" : "${ASV_ARGS:?Need to set ASV_ARGS}" -: "${ASV_CONF_PATH:?Need to set ASV_CONF_PATH}" -: "${RECOMMENDED_DEPS:?Need to set RECOMMENDED_DEPS}" +# : "${ASV_CONF_PATH:?Need to set ASV_CONF_PATH}" +# : "${RECOMMENDED_DEPS:?Need to set RECOMMENDED_DEPS}" + +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} # 0) Hook in micromamba and activate `base` eval "$(micromamba shell hook --shell=bash)" micromamba activate base # # COMMIT_SHA=0c65bbfe8ce816a181780d2a249c94dd653e115a # # COMMIT_SHA=ee5d94e0a05da11272a4af1cd731f9822565048e +# COMMIT_SHA=410d8268b243f0702ca605eda5a6732376a4a557 +# COMMIT_SHA=3d01a24f32ab86afd55e9918cc22dea14a21bb97 # pip install pipenv # pipenv install pyproject.toml @@ -18,11 +33,11 @@ micromamba activate base # 0.5) Tune the container so all CPUs stay at fixed frequency. # This requires root; Docker runs as root by default. 
# python -m pyperf system tune || true -git checkout --quiet "${COMMIT_SHA}" +git checkout "${COMMIT_SHA}" ROOT_PATH=${PWD} # 2) cd into the folder containing the asv.conf.json -cd "$(dirname "$ASV_CONF_PATH")" +cd_asv_json_dir || exit 1 # asv run "$COMMIT_SHA^!" \ # --show-stderr \ @@ -35,24 +50,30 @@ cd "$(dirname "$ASV_CONF_PATH")" # ${CPU_CORE:+-a cpu_affinity=[$CPU_CORE]} \ # | tee "$OUTPUT_DIR/benchmark_${COMMIT_SHA}.log" +# the conf name is one of "asv.conf.json" or "asv.ci.conf.json" or "asv.*.json" +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi + # change the "results_dir" in asv.conf.json to "/output/{COMMIT_SHA}/" # using python # Read the python versions from the asv.conf.json (without jq) -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('asv.conf.json').pythons; print(' '.join(pythons))") +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") for version in $python_versions; do # Create per‑Python env and install ASV python -c "import asv, os, pathlib path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') path.mkdir(parents=True, exist_ok=True) -config = asv.config.Config.load('asv.conf.json') +config = asv.config.Config.load('$CONF_NAME') config.results_dir = str(path / 'results') config.html_dir = str(path / 'html') -asv.util.write_json('asv.conf.json', config.__dict__, api_version=1) -asv.util.write_json(path / 'asv.conf.json', config.__dict__, api_version=1) +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install 
git+https://github.com/airspeed-velocity/asv # micromamba run -n "asv_${version}" pip install asv @@ -61,8 +82,8 @@ asv.util.write_json(path / 'asv.conf.json', config.__dict__, api_version=1) # # skip command if RECOMMENDED_DEPS="" # micromamba run -n "asv_${version}" pip install "${RECOMMENDED_DEPS}" # fi - micromamba run -n "asv_${version}" asv machine --yes - micromamba run -n "asv_${version}" asv run --show-stder ${ASV_ARGS} + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME done echo "Benchmarks complete." diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 7218bb1..5029062 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -21,7 +21,7 @@ def get_docker_client() -> docker.DockerClient: sys.exit(f"Could not connect to Docker daemon: {exc}") -def ensure_image(client: docker.DockerClient, image_name: str, repo_url: str, docker_dir: str) -> None: +def build_repo_image(client: docker.DockerClient, image_name: str, repo_url: str, docker_dir: str) -> None: """Ensure IMAGE exists locally, optionally pulling it.""" try: client.images.get(image_name) From 0aeb99a003eee9fa7ca72e280ae4264837d55de9 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 28 Jul 2025 23:20:22 +0000 Subject: [PATCH 03/20] update again --- scripts/validate_containers.py | 25 ++-- src/datasmith/docker/Dockerfile | 30 ++--- src/datasmith/docker/context.py | 35 ++++-- src/datasmith/docker/context_registry.py | 141 +++++++++++++++-------- src/datasmith/docker/docker_build.sh | 40 +++++++ src/datasmith/docker/entrypoint.sh | 6 +- 6 files changed, 194 insertions(+), 83 deletions(-) create mode 100644 src/datasmith/docker/docker_build.sh diff --git a/scripts/validate_containers.py b/scripts/validate_containers.py index 6f16d42..33a2720 100644 --- a/scripts/validate_containers.py +++ 
b/scripts/validate_containers.py @@ -3,6 +3,7 @@ """ import argparse +import logging from pathlib import Path from datasmith.benchmark.collection import BenchmarkCollection @@ -11,8 +12,8 @@ from datasmith.logging_config import configure_logging from datasmith.scrape.utils import _parse_commit_url -# logger = configure_logging(stream=open(Path(__file__).with_suffix(".log"), "a")) -logger = configure_logging() +# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "a")) +logger = configure_logging(level=logging.DEBUG) def parse_args() -> argparse.Namespace: @@ -54,20 +55,26 @@ def main(args: argparse.Namespace) -> None: client = get_docker_client() for (owner, repo), uniq_shas in all_states.items(): - image_name = f"asv-{owner}-{repo}" - docker_ctx = CONTEXT_REGISTRY[image_name] if image_name in CONTEXT_REGISTRY else CONTEXT_REGISTRY["default"] - - docker_ctx.build_container( - client=client, image_name=image_name, repo_url=f"https://www.github.com/{owner}/{repo}", force=True - ) for sha in uniq_shas: + image_name = f"asv-{owner}-{repo}-{sha}" + docker_ctx = CONTEXT_REGISTRY[image_name] + docker_ctx.build_container( + client=client, + image_name=image_name, + build_args={ + "REPO_URL": f"https://www.github.com/{owner}/{repo}", + "COMMIT_SHA": sha, + }, + force=True, + ) logger.debug(f"Validating {image_name} for commit {sha}") + # stop any existing container with the same name container = client.containers.run( image=image_name, detach=True, remove=True, name=f"asv-{owner}-{repo}-{sha}", - environment={"COMMIT_SHA": sha, "ASV_ARGS": "--bench convolve*"}, + environment={"ASV_ARGS": "--quick --python=same"}, volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, ) for line in container.logs(stream=True, follow=True): diff --git a/src/datasmith/docker/Dockerfile b/src/datasmith/docker/Dockerfile index 3013506..d954f02 100644 --- a/src/datasmith/docker/Dockerfile +++ 
b/src/datasmith/docker/Dockerfile @@ -1,11 +1,13 @@ FROM buildpack-deps:jammy -RUN ["apt-get", "update"] -RUN ["apt-get", "install", "-y", "vim"] -RUN ["apt-get", "install", "-y", "curl", "git", "build-essential"] ARG REPO_URL +ARG COMMIT_SHA +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + curl git build-essential jq && \ + rm -rf /var/lib/apt/lists/* -RUN curl -Ls "https://micro.mamba.pm/api/micromamba/linux-64/latest" \ +RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \ | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba ENV MAMBA_ROOT_PREFIX=/opt/conda \ @@ -16,16 +18,11 @@ ENV MAMBA_ROOT_PREFIX=/opt/conda \ OMP_NUM_THREADS=1 RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ - python=3.10 \ - git \ - asv \ - pyperf \ - libmambapy \ - mamba \ - conda \ - && micromamba clean --all --yes - -RUN mkdir /workspace /output + python=3.10 \ + git asv pyperf mamba conda libmambapy jq && \ + micromamba clean --all --yes + +RUN mkdir -p /workspace /output WORKDIR /workspace COPY entrypoint.sh /entrypoint.sh @@ -33,4 +30,9 @@ RUN chmod +x /entrypoint.sh RUN git clone ${REPO_URL} /workspace/repo WORKDIR /workspace/repo + +COPY docker_build.sh /workspace/repo/docker_build.sh +RUN chmod +x /workspace/repo/docker_build.sh +RUN /workspace/repo/docker_build.sh + ENTRYPOINT ["/entrypoint.sh"] diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index f6745d1..f98947f 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -3,7 +3,6 @@ import io import sys import tarfile -from copy import deepcopy from pathlib import Path import docker @@ -17,18 +16,24 @@ class DockerContext: default_dockerfile_loc = Path(__file__).parent / "Dockerfile" default_entrypoint_loc = Path(__file__).parent / "entrypoint.sh" + default_builder_loc = Path(__file__).parent / "docker_build.sh" dockerfile_data: str entrypoint_data: str + building_data: str - def __init__(self, 
dockerfile_data: str | None = None, entrypoint_data: str | None = None): + def __init__( + self, building_data: str | None = None, dockerfile_data: str | None = None, entrypoint_data: str | None = None + ): + if building_data is None: + building_data = self.default_builder_loc.read_text() if dockerfile_data is None: dockerfile_data = self.default_dockerfile_loc.read_text() if entrypoint_data is None: entrypoint_data = self.default_entrypoint_loc.read_text() + self.building_data = building_data self.dockerfile_data = dockerfile_data self.entrypoint_data = entrypoint_data - self.tarball_stream = self.build_tarball_stream() def build_tarball_stream(self) -> io.BytesIO: tar_stream = io.BytesIO() @@ -46,10 +51,20 @@ def build_tarball_stream(self) -> io.BytesIO: entrypoint_info.mode = 0o755 # Make it executable tar.addfile(entrypoint_info, io.BytesIO(entrypoint_data)) + # Add docker_build.sh + building_data = self.building_data.encode("utf-8") + builder_info = tarfile.TarInfo(name="docker_build.sh") + builder_info.size = len(building_data) + builder_info.mode = 0o755 # Make it executable + tar.addfile(builder_info, io.BytesIO(building_data)) + + # Reset the stream position to the beginning tar_stream.seek(0) - return deepcopy(tar_stream) + return tar_stream - def build_container(self, client: docker.DockerClient, image_name: str, repo_url: str, force: bool = False) -> None: + def build_container( + self, client: docker.DockerClient, image_name: str, build_args: dict[str, str], force: bool = False + ) -> None: """Builds the Docker image if it does not exist or if force is True.""" image_exists = False try: @@ -65,15 +80,15 @@ def build_container(self, client: docker.DockerClient, image_name: str, repo_url pass # Image doesn't exist or was removed, proceed to build if not image_exists: - if repo_url: - logger.info("Building Docker image '%s' with REPO_URL=%s", image_name, repo_url) + if len(build_args): + build_args_str = ", ".join(f"{k}={v}" for k, v in 
build_args.items()) + logger.info("Building Docker image '%s' with build args: %s", image_name, build_args_str) try: client.images.build( - fileobj=self.tarball_stream, + fileobj=self.build_tarball_stream(), custom_context=True, tag=image_name, - buildargs={"REPO_URL": repo_url}, - rm=True, + buildargs=build_args, ) except DockerException as exc: sys.exit(f"Failed to build image {image_name}: {exc}") diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index 4ec9137..e9642d6 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -1,51 +1,98 @@ +from __future__ import annotations + from datasmith.docker.context import DockerContext +from datasmith.logging_config import get_logger -DEFAULT_DOCKER_CONTEXT = DockerContext() - -ASTROPY_DOCKER_CONTEXT = DockerContext( - dockerfile_data=""" -FROM buildpack-deps:jammy - -RUN ["apt-get", "update"] -RUN ["apt-get", "install", "-y", "vim"] -RUN ["apt-get", "install", "-y", "curl", "git", "build-essential"] -ARG REPO_URL - -RUN curl -Ls "https://micro.mamba.pm/api/micromamba/linux-64/latest" \ - | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba - -ENV MAMBA_ROOT_PREFIX=/opt/conda \ - PATH=/opt/conda/bin:$PATH \ - MAMBA_DOCKERFILE_ACTIVATE=1 \ - OPENBLAS_NUM_THREADS=1 \ - MKL_NUM_THREADS=1 \ - OMP_NUM_THREADS=1 - -RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ - python=3.10 \ - git \ - asv \ - pyperf \ - libmambapy \ - mamba \ - conda \ - && micromamba clean --all --yes - -RUN mkdir /workspace /output -WORKDIR /workspace - -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -RUN git clone https://github.com/astropy/astropy /workspace/repo -WORKDIR /workspace/repo -RUN git clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch -ENTRYPOINT ["/entrypoint.sh"] -""".strip(), - entrypoint_data=DEFAULT_DOCKER_CONTEXT.entrypoint_data, -) +logger = 
get_logger("docker.context_registry") + + +class ContextRegistry: + """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" + + def __init__(self, registry: dict[str, DockerContext] | None = None, default_context: DockerContext | None = None): + if registry is None: + registry = {} + self.registry = registry + + if "default" not in self.registry: + if default_context is None: + default_context = DockerContext() + self.registry["default"] = default_context + logger.debug("Default Docker context initialized.") + + def register(self, key: str, context: DockerContext) -> None: + """Register a new Docker context.""" + if key in self.registry: + logger.warning(f"Context '{key}' is already registered, overwriting.") + self.registry[key] = context + logger.debug(f"Registered Docker context: {key}") + + def get(self, key: str) -> DockerContext: + """Retrieve a Docker context by key.""" + if key in self.registry: + return self.registry[key] + # for asv-owner-repo-sha, check if asv-owner-repo exists. + logger.debug(f"Context '{key}' not found in registry. Searching for a matching context.") + owner_repo_key = key.rsplit("-", 1)[0] + if owner_repo_key in self.registry: + logger.debug(f"Found context '{owner_repo_key}' for key '{key}'.") + return self.registry[owner_repo_key] -CONTEXT_REGISTRY: dict[str, DockerContext] = { - "default": DEFAULT_DOCKER_CONTEXT, - "asv-astropy-astropy": ASTROPY_DOCKER_CONTEXT, + logger.error(f"No context found for key '{key}'. Using default context.") + return self.registry["default"] + + def __getitem__(self, key: str) -> DockerContext: + return self.get(key) + + +CONTEXT_REGISTRY = ContextRegistry(default_context=DockerContext()) + +CONTEXT_REGISTRY.register( + "asv-astropy-astropy", + DockerContext( + building_data="""#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . 
-type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi } +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base +ROOT_PATH=${PWD} +git clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" pip install -e . 
scipy matplotlib +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) diff --git a/src/datasmith/docker/docker_build.sh b/src/datasmith/docker/docker_build.sh new file mode 100644 index 0000000..92216eb --- /dev/null +++ b/src/datasmith/docker/docker_build.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" pip install -e . 
+done diff --git a/src/datasmith/docker/entrypoint.sh b/src/datasmith/docker/entrypoint.sh index 03fd46e..da00b06 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # set -euo pipefail -: "${COMMIT_SHA:?Need to set COMMIT_SHA}" +# : "${COMMIT_SHA:?Need to set COMMIT_SHA}" : "${ASV_ARGS:?Need to set ASV_ARGS}" # : "${ASV_CONF_PATH:?Need to set ASV_CONF_PATH}" # : "${RECOMMENDED_DEPS:?Need to set RECOMMENDED_DEPS}" @@ -33,7 +33,7 @@ micromamba activate base # 0.5) Tune the container so all CPUs stay at fixed frequency. # This requires root; Docker runs as root by default. # python -m pyperf system tune || true -git checkout "${COMMIT_SHA}" +# git checkout "${COMMIT_SHA}" ROOT_PATH=${PWD} # 2) cd into the folder containing the asv.conf.json @@ -74,7 +74,7 @@ config.html_dir = str(path / 'html') asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" + # micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv # micromamba run -n "asv_${version}" pip install asv # micromamba run -n "asv_${version}" pip install -e "${ROOT_PATH}" From 201084769630c5b8573086e8d851f52d138863e2 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Tue, 29 Jul 2025 01:08:45 +0000 Subject: [PATCH 04/20] adding scikit-learn context --- Dockerfile | 72 +++++++++++++++++++----- scripts/benchmark_commits.py | 59 ++++++++++--------- scripts/validate_containers.py | 2 +- src/datasmith/docker/context.py | 12 +++- src/datasmith/docker/context_registry.py | 59 ++++++++++++++++++- src/datasmith/docker/docker_build.sh | 11 +++- src/datasmith/docker/orchestrator.py 
| 47 +++++++--------- 7 files changed, 185 insertions(+), 77 deletions(-) diff --git a/Dockerfile b/Dockerfile index 1e1b1ed..d317119 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,63 @@ -# Install uv -FROM python:3.12-slim -COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv +FROM buildpack-deps:jammy -# Change the working directory to the `app` directory -WORKDIR /app +ARG REPO_URL +ARG COMMIT_SHA +ARG BUILD_SCRIPT # A build script with custom installation commands provided by the user +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + curl git build-essential jq && \ + rm -rf /var/lib/apt/lists/* -# Copy the lockfile and `pyproject.toml` into the image -COPY uv.lock /app/uv.lock -COPY pyproject.toml /app/pyproject.toml +RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \ + | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba -# Install dependencies -RUN uv sync --frozen --no-install-project +ENV MAMBA_ROOT_PREFIX=/opt/conda \ + PATH=/opt/conda/bin:$PATH \ + MAMBA_DOCKERFILE_ACTIVATE=1 \ + OPENBLAS_NUM_THREADS=1 \ + MKL_NUM_THREADS=1 \ + OMP_NUM_THREADS=1 -# Copy the project into the image -COPY . /app +RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ + python=3.10 \ + git asv pyperf mamba conda libmambapy jq && \ + micromamba clean --all --yes -# Sync the project -RUN uv sync --frozen +RUN mkdir -p /workspace /output +WORKDIR /workspace -CMD [ "python", "datasmith/foo.py" ] +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +RUN git clone "${REPO_URL}" repo && \ + cd repo && \ + git checkout "${COMMIT_SHA}" && \ + \ + CONF_FILE=$(find . -type f -name "asv.*.json" | head -n 1) && \ + if [[ -z "${CONF_FILE}" ]]; then \ + echo "❌ No asv.*.json found." 
&& exit 1; \ + fi && \ + echo "✅ Using ASV config: ${CONF_FILE}" && \ + \ +PY_VERS=$(echo "import json, pathlib; \ +cfg = pathlib.Path('${CONF_FILE}').read_text(); \ +data = json.loads(cfg); \ +vers = data.get('pythons') or data.get('python') or []; \ +print(' '.join(dict.fromkeys(vers)))" | python -) && \ + if [[ -z "${PY_VERS}" ]]; then \ + echo "❌ No Python versions declared in ${CONF_FILE}" && exit 1; \ + fi && \ + echo "🐍 Creating Conda envs for: ${PY_VERS}" && \ + \ + for v in ${PY_VERS}; do \ + micromamba create -y -n "asv_${v}" -c conda-forge \ + python=${v} git mamba conda "libmambapy<=1.9.9"; \ + done + +WORKDIR /workspace/repo + +RUN echo "${BUILD_SCRIPT}" > /workspace/repo/docker_build.sh && \ + chmod +x /workspace/repo/docker_build.sh && \ + /workspace/repo/docker_build.sh + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/scripts/benchmark_commits.py b/scripts/benchmark_commits.py index ef562f1..b8e314b 100644 --- a/scripts/benchmark_commits.py +++ b/scripts/benchmark_commits.py @@ -2,18 +2,24 @@ import argparse import asyncio -import json +import logging import math import os +from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path import pandas as pd from datasmith.docker.orchestrator import ( - build_repo_image, + build_repo_sha_image, get_docker_client, orchestrate, ) +from datasmith.logging_config import configure_logging +from datasmith.scrape.utils import _parse_commit_url + +# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log").absolute(), "a")) +logger = configure_logging(level=logging.DEBUG) def parse_args() -> argparse.Namespace: @@ -58,9 +64,9 @@ def parse_args() -> argparse.Namespace: help="Directory containing the Dockerfile and other necessary files for building the ASV image.", ) parser.add_argument( - "--dep-recs", - type=Path, - help="An optional json file with recommended dependencies for each package. 
passed as pip install before installation.", + "--force-rebuild", + action="store_true", + help="Force rebuild the Docker images even if they already exist.", ) return parser.parse_args() @@ -70,24 +76,8 @@ def main() -> None: commits = pd.read_json(args.filtered_commits, lines=True) commits["repo_name"] = commits["repo_name"].str.lower() + commit_urls = ("https://www.github.com/" + commits["repo_name"] + "/commit/" + commits["commit_sha"]).tolist() - if not args.dep_recs.exists(): - recommended_deps = {} - else: - with open(args.dep_recs) as f: - recommended_deps = json.load(f) - - commits["dep_recs"] = "" - for query, custom_deps in recommended_deps.items(): - valid_idxs = commits.query(query).index - commits.loc[valid_idxs, "dep_recs"] = [custom_deps] * len(valid_idxs) - - repo_urls = ("https://www.github.com/" + commits["repo_name"]).tolist() - commit_shas = commits["commit_sha"].tolist() - asv_conf_paths = [paths[0] for paths in commits["asv_conf_path"].tolist()] - dependency_recs = commits["dep_recs"].tolist() - # if repo_name is scikit-learn/scikit-learn -> docker container name is `asv-scikit-learn-scikit-learn` - docker_image_names = [f"asv-{repo_url.split('/')[-2]}-{repo_url.split('/')[-1]}" for repo_url in repo_urls] max_concurrency = ( args.max_concurrency if args.max_concurrency != -1 else max(4, math.floor(0.5 * (os.cpu_count() or 1))) ) @@ -108,19 +98,28 @@ def main() -> None: client = get_docker_client() # Ensure all required Docker images are available - visited = set() - for image_name, repo_url in zip(docker_image_names, repo_urls): - if image_name not in visited: - build_repo_image(client, image_name, repo_url, docker_dir=str(args.docker_dir)) - visited.add(image_name) + all_states = {} + for owner, repo, sha in map(_parse_commit_url, commit_urls): + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {sha} + else: + all_states[(owner, repo)].add(sha) + + all_states = list(set(map(_parse_commit_url, commit_urls))) + 
docker_image_names = [] + + with ThreadPoolExecutor(max_workers=args.num_cores * 4) as pool: + futures = [ + pool.submit(build_repo_sha_image, client, owner, repo, sha, args.force_rebuild) + for owner, repo, sha in all_states + ] + for fut in as_completed(futures): + docker_image_names.append(fut.result()) asyncio.run( orchestrate( - commit_shas=commit_shas, - asv_conf_paths=asv_conf_paths, docker_image_names=docker_image_names, asv_args=asv_args, - recommended_deps=dependency_recs, max_concurrency=max_concurrency, n_cores=n_cores, output_dir=args.output_dir.absolute(), diff --git a/scripts/validate_containers.py b/scripts/validate_containers.py index 33a2720..6621968 100644 --- a/scripts/validate_containers.py +++ b/scripts/validate_containers.py @@ -73,7 +73,7 @@ def main(args: argparse.Namespace) -> None: image=image_name, detach=True, remove=True, - name=f"asv-{owner}-{repo}-{sha}", + name=f"asv-{owner}-{repo}-{sha}-validation", environment={"ASV_ARGS": "--quick --python=same"}, volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, ) diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index f98947f..01e8d09 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -14,6 +14,14 @@ class DockerContext: + """ + A docker context stores all the necessary files to build a docker container + for running ASV benchmarks. It includes the Dockerfile, entrypoint script, + and a script to build the container. + + This allows customizing the Docker image without needing to modify the Dockerfile directly. 
+ """ + default_dockerfile_loc = Path(__file__).parent / "Dockerfile" default_entrypoint_loc = Path(__file__).parent / "entrypoint.sh" default_builder_loc = Path(__file__).parent / "docker_build.sh" @@ -81,8 +89,8 @@ def build_container( if not image_exists: if len(build_args): - build_args_str = ", ".join(f"{k}={v}" for k, v in build_args.items()) - logger.info("Building Docker image '%s' with build args: %s", image_name, build_args_str) + build_args_str = " --build-arg ".join(f"{k}={v}" for k, v in build_args.items()) + logger.info("$ docker build -t %s src/datasmith/docker/ --build-arg %s", image_name, build_args_str) try: client.images.build( fileobj=self.build_tarball_stream(), diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index e9642d6..617ad0f 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -38,7 +38,7 @@ def get(self, key: str) -> DockerContext: logger.debug(f"Found context '{owner_repo_key}' for key '{key}'.") return self.registry[owner_repo_key] - logger.error(f"No context found for key '{key}'. Using default context.") + logger.info(f"No context found for key '{key}'. Using default context.") return self.registry["default"] def __getitem__(self, key: str) -> DockerContext: @@ -96,3 +96,60 @@ def __getitem__(self, key: str) -> DockerContext: entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, ), ) + + +CONTEXT_REGISTRY.register( + "asv-scikit-learn-scikit-learn", + DockerContext( + building_data="""#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +apt-get update && \ + apt-get install -y \ + ninja-build \ + cmake + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" pip install meson-python cython + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) diff --git a/src/datasmith/docker/docker_build.sh b/src/datasmith/docker/docker_build.sh index 92216eb..3a72116 100644 --- a/src/datasmith/docker/docker_build.sh +++ b/src/datasmith/docker/docker_build.sh @@ -13,6 +13,12 @@ cd_asv_json_dir() { } eval "$(micromamba shell hook --shell=bash)" micromamba activate base + +apt-get 
update && \ + apt-get install -y \ + ninja-build \ + cmake + ROOT_PATH=${PWD} cd_asv_json_dir || exit 1 CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") @@ -33,8 +39,9 @@ config.html_dir = str(path / 'html') asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME - micromamba run -n "asv_${version}" pip install -e . + micromamba run -n "asv_${version}" pip install meson-python cython + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} done diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 5029062..8e29e16 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -8,6 +8,7 @@ import docker from docker.errors import DockerException, ImageNotFound +from datasmith.docker.context_registry import CONTEXT_REGISTRY from datasmith.logging_config import get_logger logger = get_logger("docker.orchestrator") @@ -45,13 +46,25 @@ def build_repo_image(client: docker.DockerClient, image_name: str, repo_url: str raise RuntimeError +def build_repo_sha_image(client: docker.DockerClient, owner: str, repo: str, sha: str, force: bool = False) -> str: + image_name = f"asv-{owner}-{repo}-{sha}" + docker_ctx = CONTEXT_REGISTRY[image_name] + docker_ctx.build_container( + client=client, + image_name=image_name, + build_args={ + "REPO_URL": f"https://www.github.com/{owner}/{repo}", 
+ "COMMIT_SHA": sha, + }, + force=force, + ) + return image_name + + async def run_container( client: docker.DockerClient, idx: int, cores: str | Sequence[int], - commit_sha: str, - asv_conf_path: str, - recommended_deps: str, image: str, asv_args: str, output_dir: Path, @@ -67,25 +80,18 @@ async def run_container( cpuset = ",".join(map(str, cores)) if not isinstance(cores, str) else cores num_cores = len(cpuset.split(",")) env = { - "COMMIT_SHA": commit_sha, - "ASV_CONF_PATH": asv_conf_path, - # asv can take a comma-separated list for --cpu-affinity "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores}", - "RECOMMENDED_DEPS": recommended_deps, } def _launch() -> int: - container_name = f"asv_{idx}_{commit_sha[:7]}" + container_name = f"{image.split(':')[0]}-{idx:03d}" logger.debug("docker run name=%s cpuset=%s env=%s", container_name, cpuset, env) # Log the exact command a human could copy-paste logger.info( - "$ docker run --rm --name %s -e COMMIT_SHA=%s -e ASV_CONF_PATH=%s -e ASV_ARGS='%s' -e RECOMMENDED_DEPS='%s' --cpuset-cpus %s %s", + "$ docker run --rm --name %s -e ASV_ARGS='%s' --cpuset-cpus %s %s", container_name, - commit_sha, - asv_conf_path, env["ASV_ARGS"], - env["RECOMMENDED_DEPS"], cpuset, image, ) @@ -118,9 +124,6 @@ def _launch() -> int: async def orchestrate( - commit_shas: Sequence[str], - asv_conf_paths: Sequence[str], - recommended_deps: Sequence[str], docker_image_names: Sequence[str], asv_args: str, max_concurrency: int, @@ -141,19 +144,16 @@ async def orchestrate( for s in core_sets: core_pool.put_nowait(s) - async def worker(idx: int, commit_sha: str, asv_conf_path: str, recommended_deps: str, image: str) -> int: + async def worker(idx: int, image: str) -> int: core_set = await core_pool.get() # blocks until a free set exists cpuset_str = ",".join(map(str, core_set)) # "0,1,2,3" - logger.info("▶︎ cores=%s sha=%s", cpuset_str, commit_sha) + logger.info("▶︎ cores=%s image=%s", cpuset_str, image) try: rc = await 
run_container( client=client, idx=idx, cores=cpuset_str, - commit_sha=commit_sha, - asv_conf_path=asv_conf_path, - recommended_deps=recommended_deps, image=image, asv_args=asv_args, output_dir=output_dir, @@ -165,12 +165,7 @@ async def worker(idx: int, commit_sha: str, asv_conf_path: str, recommended_deps # Always release the core set, even on failure core_pool.put_nowait(core_set) - tasks = [ - asyncio.create_task(worker(i, sha, conf, rec_deps, img)) - for i, (sha, conf, rec_deps, img) in enumerate( - zip(commit_shas, asv_conf_paths, recommended_deps, docker_image_names) - ) - ] + tasks = [asyncio.create_task(worker(i, img)) for i, img in enumerate(docker_image_names)] results = await asyncio.gather(*tasks) failures = sum(rc != 0 for rc in results) From c54ec72bb9c2a5f3d534433c142cc8ede2b62c1d Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 16 Aug 2025 23:25:28 +0000 Subject: [PATCH 05/20] small changes --- .gitignore | 2 +- Makefile | 4 +- README.md | 68 ++-- .../artifacts}/raw/online_dashboards.jsonl | 0 {notebooks => scratch/notebooks}/cache.db | Bin .../notebooks}/compare_benchmarks.ipynb | 350 +++++++++++++++++- .../scripts}/benchmark_commits.py | 6 +- .../scripts}/collate_benchmark_results.py | 0 .../scripts}/collect_commits.py | 0 .../scripts}/detect_breakpoints.py | 0 .../scripts}/download_dataset.py | 0 .../scripts}/filter_commits.py | 0 .../scripts}/scrape_repositories.py | 0 .../scripts}/validate_containers.py | 0 src/datasmith/docker/entrypoint.sh | 45 +-- src/datasmith/docker/orchestrator.py | 15 +- src/datasmith/scrape/scrape_dashboards.py | 2 +- src/datasmith/scrape/utils.py | 4 - 18 files changed, 401 insertions(+), 95 deletions(-) rename {artifacts => scratch/artifacts}/raw/online_dashboards.jsonl (100%) rename {notebooks => scratch/notebooks}/cache.db (100%) rename {notebooks => scratch/notebooks}/compare_benchmarks.ipynb (86%) rename {scripts => scratch/scripts}/benchmark_commits.py (93%) rename {scripts => 
scratch/scripts}/collate_benchmark_results.py (100%) rename {scripts => scratch/scripts}/collect_commits.py (100%) rename {scripts => scratch/scripts}/detect_breakpoints.py (100%) rename {scripts => scratch/scripts}/download_dataset.py (100%) rename {scripts => scratch/scripts}/filter_commits.py (100%) rename {scripts => scratch/scripts}/scrape_repositories.py (100%) rename {scripts => scratch/scripts}/validate_containers.py (100%) diff --git a/.gitignore b/.gitignore index 04c9901..c248a46 100644 --- a/.gitignore +++ b/.gitignore @@ -195,4 +195,4 @@ cython_debug/ tokens.env benchmark_results/ -artifacts/ +scratch/artifacts/ diff --git a/Makefile b/Makefile index e3aff8a..66a0e46 100644 --- a/Makefile +++ b/Makefile @@ -16,8 +16,8 @@ backup: ## Create a backup of the datasets, results, and analysis directories echo "❌ Error: BACKUP_DIR not defined in tokens.env"; exit 1; \ fi; \ mkdir -p "$$BACKUP_DIR"; \ - zip -qr "$$BACKUP_DIR/datasmith.bckp" artifacts/benchmark_results artifacts/raw; \ - cp -f artifacts/cache.db "$$BACKUP_DIR/datasmith.cache.bckp"; \ + zip -qr "$$BACKUP_DIR/datasmith.bckp" scratch/artifacts/benchmark_results scratch/artifacts/raw; \ + cp -f scratch/artifacts/cache.db "$$BACKUP_DIR/datasmith.cache.bckp"; \ ' .PHONY: check diff --git a/README.md b/README.md index cbb7ca5..e324c13 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ FormulaCode is designed to benchmark the capabilities of large language models ( ## Data layout The general layout of the artifacts is as follows: ```bash -artifacts/ +scratch/artifacts ├── raw/ # Raw downloads & lists produced by scripts │   ├── downloads/ # Per‑repo dashboard archives │   ├── online_dashboards.jsonl # Updated config for dashboard scraper @@ -61,7 +61,7 @@ $ cat tokens.env GH_TOKEN=github_pat_??? COVERALLS_TOKEN=XdK??? CODECOV_TOKEN=54c6??? 
-CACHE_LOCATION=/home/???/formulacode/datasmith/artifacts/cache.db +CACHE_LOCATION=/home/???/formulacode/datasmith/scratch/artifacts/cache.db BACKUP_DIR=/home/???/formulacode/backup/ ``` @@ -74,30 +74,30 @@ FormulaCode Lite is a small dataset of 5 repositories with ~440 performance impr ### Scrape online dashboards -Each of these repositories has a publicly accessible perpetually updating dashboard (e.g. Astropy's dashboard lives [here](https://spacetelescope.github.io/bench/astropy-benchmarks)) that tracks the performance of each commit against various benchmarks. These dashboards were manually curated and placed in a file called `artifacts/raw/online_dashboards.jsonl`. +Each of these repositories has a publicly accessible perpetually updating dashboard (e.g. Astropy's dashboard lives [here](https://spacetelescope.github.io/bench/astropy-benchmarks)) that tracks the performance of each commit against various benchmarks. These dashboards were manually curated and placed in a file called `scratch/artifacts/raw/online_dashboards.jsonl`. 
```json -{"url": "https://pv.github.io/scipy-bench/", "output_dir": "artifacts/raw/downloads/scipy"} -{"url": "https://pandas-dev.github.io/asv-runner/", "output_dir": "artifacts/raw/downloads/pandas"} -{"url": "https://scikit-learn.org/scikit-learn-benchmarks/", "output_dir": "artifacts/raw/downloads/sklearn"} -{"url": "https://spacetelescope.github.io/bench/astropy-benchmarks/", "output_dir": "artifacts/raw/downloads/astropy"} -{"url": "https://pv.github.io/numpy-bench/", "output_dir": "artifacts/raw/downloads/numpy"} +{"url": "https://asv-runner.github.io/asv-collection/pandas/", "output_dir": "artifacts/processed/downloads/pandas"} +{"url": "https://pv.github.io/scipy-bench/", "output_dir": "artifacts/processed/downloads/scipy"} +{"url": "https://scikit-learn.org/scikit-learn-benchmarks/", "output_dir": "artifacts/processed/downloads/sklearn"} +{"url": "https://spacetelescope.github.io/bench/astropy-benchmarks/", "output_dir": "artifacts/processed/downloads/astropy"} +{"url": "https://pv.github.io/numpy-bench/", "output_dir": "artifacts/processed/downloads/numpy"} ``` As all these dashboards have the same structure, we developed an ethical scraper that can scrape these dashboards and download the performance data in a structured format. The scraper is invoked using `scripts/download_dataset.py` and can be run as follows: ```bash $ python scripts/download_dataset.py \ --force \ - --dashboards artifacts/raw/online_dashboards.jsonl + --dashboards scratch/artifacts/raw/online_dashboards.jsonl # machines: 100%|██████████████████████████████████████| 7/7 [00:56<00:00, 8.05s/it] # Collected 46,143 rows from 805 benchmark files. 
# summaries: 100%|█████████████████████████████████| 115/115 [00:09<00:00, 12.56it/s] -# Saved 46,143 benchmark rows and 22,577 summary rows -> /home/???/formulacode/datasmith/artifacts/raw/downloads/sklearn/dashboard.fc.pkl -# Data downloaded to artifacts/raw/downloads/sklearn +# Saved 46,143 benchmark rows and 22,577 summary rows -> /home/???/formulacode/datasmith/scratch/artifacts/processed/downloads/sklearn/dashboard.fc.pkl +# Data downloaded to scratch/artifacts/processed/downloads/sklearn # ... ``` -This should create a directory called `artifacts/raw/downloads` that contains the downloaded data for each repository. The data is stored in a structured format that can be easily processed later. More information about the format is available in `datasmith/benchmark/collection.py`. +This should create a directory called `scratch/artifacts/processed/downloads` that contains the downloaded data for each repository. The data is stored in a structured format that can be easily processed later. More information about the format is available in `datasmith/benchmark/collection.py`. ### 2. Detect performance improving commits @@ -113,12 +113,12 @@ $ python scripts/detect_breakpoints.py \ --build-reports \ --method rbf \ --compute-coverage \ - --dataset artifacts/raw/downloads/astropy/dashboard.fc.pkl + --dataset scratch/artifacts/processed/downloads/astropy/dashboard.fc.pkl # Found 1,085 potential downward shifts. # Codecov: 100%|███████████████████████████████| 119/119 [08:50<00:00, 4.46s/commit] # Building GitHub commit reports and merged dataframe ... # Reports: 100%|█████████████████████████████████| 40/40 [02:55<00:00, 4.38s/commit] -# Enriched breakpoints saved to '/home/???/formulacode/datasmith/artifacts/raw/downloads/astropy/breakpoints.fc.pkl'. +# Enriched breakpoints saved to '/home/???/formulacode/datasmith/scratch/artifacts/processed/downloads/astropy/breakpoints.fc.pkl'. 
```
 
 The `breakpoints.fc.pkl` collection contains all the information about the detected performance improving commits, a markdown report for each commit with useful hints for the optimizer, and a merged CSV file that contains the performance data for all commits in the repository. These files can then be used in the evaluation harness for benchmarking the performance of an optimizer `[@TODO:link formula-code/evaluation-harness]`.
@@ -145,13 +145,13 @@ To run the script, you need to have a GitHub token with `repo` and `read:org` pe
 The scraper can be run using the following command:
 ```bash
 $ python scripts/scrape_repositories.py \
-    --outfile artifacts/raw/repos_discovered.csv \
+    --outfile scratch/artifacts/processed/repos_discovered.csv \
     --min-stars 500 \
-    --filtered-outfile artifacts/raw/repos_valid.csv
-# Writes artifacts/raw/repos_discovered.csv and artifacts/raw/repos_valid.csv
+    --filtered-outfile scratch/artifacts/processed/repos_valid.csv
+# Writes scratch/artifacts/processed/repos_discovered.csv and scratch/artifacts/processed/repos_valid.csv
 ```
 
-The `artifacts/raw/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads / has atleast 500 stars / pass other sanity checks. We found ~700 filtered repositories for this dataset.
+The `scratch/artifacts/processed/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads / have at least 500 stars / pass other sanity checks. We found ~700 filtered repositories for this dataset.
 
 ### 4. 
Collect relevant commits for all repositories @@ -160,13 +160,13 @@ Given the list of repositories, we find the subset of commits that have already ```bash $ python scripts/collect_commits.py \ - --dashboards artifacts/raw/repos_valid.csv \ - --outfile artifacts/raw/commits_all.jsonl \ + --dashboards scratch/artifacts/raw/repos_valid.csv \ + --outfile scratch/artifacts/raw/commits_all.jsonl \ --max-pages 50 $ python scripts/filter_commits.py \ - --filtered-benchmarks-pth artifacts/raw/repos_valid.csv \ - --merged-commits-pth artifacts/raw/commits_all.jsonl \ - --output-pth artifacts/raw/commits_filtered.jsonl \ + --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid.csv \ + --merged-commits-pth scratch/artifacts/raw/commits_all.jsonl \ + --output-pth scratch/artifacts/raw/commits_filtered.jsonl \ --max-repos 350 \ --threads 8 \ --procs 8 @@ -188,15 +188,15 @@ The `dependency_recommendations.json` file is a dictionary that contains recomme (sudo) $ sudo python -m pyperf system tune # in userspace: $ python scripts/benchmark_commits.py \ - --filtered-commits artifacts/raw/commits_filtered.jsonl \ - --dep-recs artifacts/raw/dependency_recommendations.json \ + --filtered-commits scratch/artifacts/raw/commits_filtered.jsonl \ + --dep-recs scratch/artifacts/raw/dependency_recommendations.json \ --max-concurrency 30 \ --num-cores 2 \ --asv-args "--interleave-rounds --append-samples -a rounds=2 -a repeat=2" \ - --output-dir artifacts/benchmark_results/ + --output-dir scratch/artifacts/benchmark_results/ ``` -Generally, each benchmark takes ~2 minutes to run, so benchmarking 70,000 commits on 16 dedicated 4-core machines takes around 6 days. The script will create a directory called `artifacts/benchmark_results/` that contains the results of the benchmarks for each commit. The results are stored in a structured format that can be easily processed later. 
+Generally, each benchmark takes ~2 minutes to run, so benchmarking 70,000 commits on 16 dedicated 4-core machines takes around 6 days. The script will create a directory called `scratch/artifacts/benchmark_results/` that contains the results of the benchmarks for each commit. The results are stored in a structured format that can be easily processed later. ### 6. Collate benchmark results @@ -204,9 +204,9 @@ This step aggregates the benchmark results and generates the `*.fc.pkl` file. Th ```bash $ python scripts/collate_benchmark_results.py \ - --results-dir artifacts/benchmark_results/results \ - --output-dir artifacts/benchmark_results/published/ \ - --commit-metadata artifacts/raw/commits_filtered.jsonl \ + --results-dir scratch/artifacts/benchmark_results/results \ + --output-dir scratch/artifacts/benchmark_results/published/ \ + --commit-metadata scratch/artifacts/raw/commits_filtered.jsonl \ --default-machine-name "docker" # machines: 100%|██████████████████████████████████████████████| 1/1 [00:00<00:00, 1.53it/s] # Collected 53,705 rows from 115 benchmark files. @@ -217,7 +217,7 @@ $ python scripts/detect_breakpoints.py \ --build-reports \ --method rbf \ --compute-coverage \ - --dataset artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl + --dataset scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl # ... 
``` @@ -229,9 +229,9 @@ How closely do our benchmarked metrics match the original performance improvemen ```bash $ python scripts/replication_experiment.py \ - --dataset1 artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/breakpoints.fc.pkl \ - --dataset2 artifacts/raw/downloads/sklearn/breakpoints.fc.pkl \ - --output-dir artifacts/replication/ + --dataset1 scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/breakpoints.fc.pkl \ + --dataset2 scratch/artifacts/raw/downloads/sklearn/breakpoints.fc.pkl \ + --output-dir scratch/artifacts/replication/ ``` ### Pipeline flowchart diff --git a/artifacts/raw/online_dashboards.jsonl b/scratch/artifacts/raw/online_dashboards.jsonl similarity index 100% rename from artifacts/raw/online_dashboards.jsonl rename to scratch/artifacts/raw/online_dashboards.jsonl diff --git a/notebooks/cache.db b/scratch/notebooks/cache.db similarity index 100% rename from notebooks/cache.db rename to scratch/notebooks/cache.db diff --git a/notebooks/compare_benchmarks.ipynb b/scratch/notebooks/compare_benchmarks.ipynb similarity index 86% rename from notebooks/compare_benchmarks.ipynb rename to scratch/notebooks/compare_benchmarks.ipynb index beec5c8..749cb2e 100644 --- a/notebooks/compare_benchmarks.ipynb +++ b/scratch/notebooks/compare_benchmarks.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "7d96c171", "metadata": {}, "outputs": [ @@ -17,19 +17,353 @@ "source": [ "# replication experiment\n", "%cd /mnt/sdd1/atharvas/formulacode/datasmith\n", + "from pathlib import Path\n", + "\n", + "import pandas as pd\n", + "\n", "from datasmith.benchmark.collection import BenchmarkCollection" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 33, + "id": "7590a13d", + "metadata": {}, + "outputs": [], + "source": [ + "dashboards = list(Path(\"scratch/artifacts/processed/downloads/\").glob(\"*/breakpoints.fc.pkl\"))\n", + "dbs = 
{}\n", + "for db in dashboards:\n", + " benchmark = BenchmarkCollection.load(db)\n", + " dbs[db.parent.name] = benchmark" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "52583d15", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'numpy': 27,\n", + " 'distributed': 80,\n", + " 'pymc3': 120,\n", + " 'joblib': 78,\n", + " 'sklearn': 10,\n", + " 'pandas': 893,\n", + " 'scikit-image': 186,\n", + " 'dask': 43,\n", + " 'astropy': 1085,\n", + " 'xarray': 335,\n", + " 'scipy': 427}" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "{db: len(benchmark.breakpoints) for db, benchmark in dbs.items()}" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "f6484891", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
repo_namestars
0scikit-learn/scikit-learn62570
2Textualize/rich52740
10pandas-dev/pandas45892
4tqdm/tqdm30055
9numpy/numpy29865
.........
64pysal/momepy538
57newton-physics/newton523
22python-hyper/h11521
37sourmash-bio/sourmash510
34Rockhopper-Technologies/enlighten503
\n", + "

68 rows \u00d7 2 columns

\n", + "
" + ], + "text/plain": [ + " repo_name stars\n", + "0 scikit-learn/scikit-learn 62570\n", + "2 Textualize/rich 52740\n", + "10 pandas-dev/pandas 45892\n", + "4 tqdm/tqdm 30055\n", + "9 numpy/numpy 29865\n", + ".. ... ...\n", + "64 pysal/momepy 538\n", + "57 newton-physics/newton 523\n", + "22 python-hyper/h11 521\n", + "37 sourmash-bio/sourmash 510\n", + "34 Rockhopper-Technologies/enlighten 503\n", + "\n", + "[68 rows x 2 columns]" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pd.read_csv(\"scratch/artifacts/raw/repos_valid.csv\")[[\"repo_name\", \"stars\"]].sort_values(\"stars\", ascending=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "66880b52", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
revisiontimehashdatebenchmark
0151430.841295NaNNaNbench_app.LaplaceInplace.time_it
1152390.843208NaNNaNbench_app.LaplaceInplace.time_it
2153350.841102NaNNaNbench_app.LaplaceInplace.time_it
3154310.843391NaNNaNbench_app.LaplaceInplace.time_it
4156230.850291NaNNaNbench_app.LaplaceInplace.time_it
..................
51872335070.00061249c560c22f137907ea6a240591e49b004f28444b1.643742e+12bench_ufunc_strides.Unary.time_ufunc
51873335140.00059945fb3a28100d509b695f432cf100f36f0e4c2f9f1.643826e+12bench_ufunc_strides.Unary.time_ufunc
51874335170.000599273b7fe0a94ae3a8ac96f4b34a396a8d3584c2101.643831e+12bench_ufunc_strides.Unary.time_ufunc
51875335200.0006048840d950e0eedf2eadb96d9867d8f8341e51aaac1.643839e+12bench_ufunc_strides.Unary.time_ufunc
51876335320.0006659d9cc5ca473d52eeda693a254fc0a1cbd4f4f1c41.643914e+12bench_ufunc_strides.Unary.time_ufunc
\n", + "

51877 rows \u00d7 5 columns

\n", + "
" + ], + "text/plain": [ + " revision time hash \\\n", + "0 15143 0.841295 NaN \n", + "1 15239 0.843208 NaN \n", + "2 15335 0.841102 NaN \n", + "3 15431 0.843391 NaN \n", + "4 15623 0.850291 NaN \n", + "... ... ... ... \n", + "51872 33507 0.000612 49c560c22f137907ea6a240591e49b004f28444b \n", + "51873 33514 0.000599 45fb3a28100d509b695f432cf100f36f0e4c2f9f \n", + "51874 33517 0.000599 273b7fe0a94ae3a8ac96f4b34a396a8d3584c210 \n", + "51875 33520 0.000604 8840d950e0eedf2eadb96d9867d8f8341e51aaac \n", + "51876 33532 0.000665 9d9cc5ca473d52eeda693a254fc0a1cbd4f4f1c4 \n", + "\n", + " date benchmark \n", + "0 NaN bench_app.LaplaceInplace.time_it \n", + "1 NaN bench_app.LaplaceInplace.time_it \n", + "2 NaN bench_app.LaplaceInplace.time_it \n", + "3 NaN bench_app.LaplaceInplace.time_it \n", + "4 NaN bench_app.LaplaceInplace.time_it \n", + "... ... ... \n", + "51872 1.643742e+12 bench_ufunc_strides.Unary.time_ufunc \n", + "51873 1.643826e+12 bench_ufunc_strides.Unary.time_ufunc \n", + "51874 1.643831e+12 bench_ufunc_strides.Unary.time_ufunc \n", + "51875 1.643839e+12 bench_ufunc_strides.Unary.time_ufunc \n", + "51876 1.643914e+12 bench_ufunc_strides.Unary.time_ufunc \n", + "\n", + "[51877 rows x 5 columns]" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dbs[\"numpy\"].summaries" + ] + }, + { + "cell_type": "code", + "execution_count": null, "id": "d23fc794", "metadata": {}, "outputs": [], "source": [ - "all_benchmarks_meas1 = BenchmarkCollection.load(\"artifacts/raw/downloads/sklearn/dashboard.fc.pkl\")\n", + "all_benchmarks_meas1 = BenchmarkCollection.load(\"scratch/artifacts/raw/downloads/sklearn/dashboard.fc.pkl\")\n", "all_benchmarks_meas2 = BenchmarkCollection.load(\n", - " \"artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl\"\n", + " \"scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl\"\n", ")" ] }, @@ -118,7 +452,7 @@ }, { 
"cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "d2e7b36c", "metadata": {}, "outputs": [], @@ -128,8 +462,8 @@ "\n", "import pandas as pd\n", "\n", - "all_commits_pth = \"artifacts/raw/commits_all.jsonl\"\n", - "filtered_commits_pth = \"artifacts/raw/commits_filtered.jsonl\"\n", + "all_commits_pth = \"scratch/artifacts/raw/commits_all.jsonl\"\n", + "filtered_commits_pth = \"scratch/artifacts/raw/commits_filtered.jsonl\"\n", "with open(all_commits_pth) as fp:\n", " all_commits = [json.loads(line.strip().replace(\"'\", '\"')) for line in fp]\n", "\n", @@ -231,7 +565,7 @@ "\n", "from datasmith.execution.utils import _get_commit_info, find_file_in_tree\n", "\n", - "out_path = \"artifacts/raw/sklearn_commits_filtered_intersection.jsonl\"\n", + "out_path = \"scratch/artifacts/raw/sklearn_commits_filtered_intersection.jsonl\"\n", "repo_name = \"scikit-learn/scikit-learn\"\n", "asv_conf_path = find_file_in_tree(repo_name, \"asv.conf.json\")\n", "# main_df = filtered_commits.copy()\n", diff --git a/scripts/benchmark_commits.py b/scratch/scripts/benchmark_commits.py similarity index 93% rename from scripts/benchmark_commits.py rename to scratch/scripts/benchmark_commits.py index b8e314b..ae4bd3a 100644 --- a/scripts/benchmark_commits.py +++ b/scratch/scripts/benchmark_commits.py @@ -8,6 +8,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path +import asv import pandas as pd from datasmith.docker.orchestrator import ( @@ -42,7 +43,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--asv-args", type=str, - default="--quick", + default="--append-samples -a rounds=2 -a repeat=2 --python=same", help="Additional arguments to pass to the asv command inside the container.", ) parser.add_argument( @@ -116,10 +117,13 @@ def main() -> None: for fut in as_completed(futures): docker_image_names.append(fut.result()) + machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: 
ignore[reportAttributeAccessIssue] + machine_args["num_cpu"] = str(args.num_cores) asyncio.run( orchestrate( docker_image_names=docker_image_names, asv_args=asv_args, + machine_args=machine_args, max_concurrency=max_concurrency, n_cores=n_cores, output_dir=args.output_dir.absolute(), diff --git a/scripts/collate_benchmark_results.py b/scratch/scripts/collate_benchmark_results.py similarity index 100% rename from scripts/collate_benchmark_results.py rename to scratch/scripts/collate_benchmark_results.py diff --git a/scripts/collect_commits.py b/scratch/scripts/collect_commits.py similarity index 100% rename from scripts/collect_commits.py rename to scratch/scripts/collect_commits.py diff --git a/scripts/detect_breakpoints.py b/scratch/scripts/detect_breakpoints.py similarity index 100% rename from scripts/detect_breakpoints.py rename to scratch/scripts/detect_breakpoints.py diff --git a/scripts/download_dataset.py b/scratch/scripts/download_dataset.py similarity index 100% rename from scripts/download_dataset.py rename to scratch/scripts/download_dataset.py diff --git a/scripts/filter_commits.py b/scratch/scripts/filter_commits.py similarity index 100% rename from scripts/filter_commits.py rename to scratch/scripts/filter_commits.py diff --git a/scripts/scrape_repositories.py b/scratch/scripts/scrape_repositories.py similarity index 100% rename from scripts/scrape_repositories.py rename to scratch/scripts/scrape_repositories.py diff --git a/scripts/validate_containers.py b/scratch/scripts/validate_containers.py similarity index 100% rename from scripts/validate_containers.py rename to scratch/scripts/validate_containers.py diff --git a/src/datasmith/docker/entrypoint.sh b/src/datasmith/docker/entrypoint.sh index da00b06..31fb03b 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -1,10 +1,7 @@ #!/usr/bin/env bash # set -euo pipefail - -# : "${COMMIT_SHA:?Need to set COMMIT_SHA}" : "${ASV_ARGS:?Need to set ASV_ARGS}" -# : 
"${ASV_CONF_PATH:?Need to set ASV_CONF_PATH}" -# : "${RECOMMENDED_DEPS:?Need to set RECOMMENDED_DEPS}" +: "${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}" cd_asv_json_dir() { local match @@ -19,37 +16,11 @@ cd_asv_json_dir() { fi } -# 0) Hook in micromamba and activate `base` eval "$(micromamba shell hook --shell=bash)" micromamba activate base -# # COMMIT_SHA=0c65bbfe8ce816a181780d2a249c94dd653e115a -# # COMMIT_SHA=ee5d94e0a05da11272a4af1cd731f9822565048e -# COMMIT_SHA=410d8268b243f0702ca605eda5a6732376a4a557 -# COMMIT_SHA=3d01a24f32ab86afd55e9918cc22dea14a21bb97 - -# pip install pipenv -# pipenv install pyproject.toml - -# 0.5) Tune the container so all CPUs stay at fixed frequency. -# This requires root; Docker runs as root by default. -# python -m pyperf system tune || true -# git checkout "${COMMIT_SHA}" - ROOT_PATH=${PWD} -# 2) cd into the folder containing the asv.conf.json cd_asv_json_dir || exit 1 -# asv run "$COMMIT_SHA^!" \ -# --show-stderr \ -# ${BENCH_REGEX:+--bench "$BENCH_REGEX"} \ -# ${INTERLEAVE_ROUNDS:+--interleave-rounds} \ -# ${APPEND_SAMPLES:+--append-samples --record-samples} \ -# -a rounds=$ROUNDS \ -# -a number=$NUMBER \ -# -a repeat=$REPEAT \ -# ${CPU_CORE:+-a cpu_affinity=[$CPU_CORE]} \ -# | tee "$OUTPUT_DIR/benchmark_${COMMIT_SHA}.log" - # the conf name is one of "asv.conf.json" or "asv.ci.conf.json" or "asv.*.json" CONF_NAME=$(basename "$(find . 
-type f -name "asv.*.json" | head -n 1)") if [[ -z "$CONF_NAME" ]]; then @@ -57,10 +28,9 @@ if [[ -z "$CONF_NAME" ]]; then exit 1 fi -# change the "results_dir" in asv.conf.json to "/output/{COMMIT_SHA}/" -# using python -# Read the python versions from the asv.conf.json (without jq) +# Read the python versions from the asv.conf.json python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +# change the "results_dir" in asv.conf.json to "/output/{COMMIT_SHA}/" for version in $python_versions; do # Create per‑Python env and install ASV python -c "import asv, os, pathlib @@ -74,15 +44,8 @@ config.html_dir = str(path / 'html') asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - # micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - # micromamba run -n "asv_${version}" pip install asv - # micromamba run -n "asv_${version}" pip install -e "${ROOT_PATH}" - # if [ -n "$RECOMMENDED_DEPS" ]; then - # # skip command if RECOMMENDED_DEPS="" - # micromamba run -n "asv_${version}" pip install "${RECOMMENDED_DEPS}" - # fi - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS} micromamba run -n "asv_${version}" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME done diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 8e29e16..32c7dca 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -67,6 +67,7 @@ async def run_container( cores: str | Sequence[int], image: str, asv_args: str, + machine_args: dict[str, str], output_dir: Path, ) -> int: """ @@ 
-79,8 +80,13 @@ async def run_container( # Normalise to the cpuset string Docker expects cpuset = ",".join(map(str, cores)) if not isinstance(cores, str) else cores num_cores = len(cpuset.split(",")) + sha = image.split(":")[0].split("-")[-1] # Extract the commit SHA from the image name + if "machine" not in machine_args: + raise ValueError("machine_args must contain a 'machine' key") # noqa: TRY003 + machine_args["machine"] = sha env = { - "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores}", + "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores} --set-commit-hash {sha} --machine {sha}", + "ASV_MACHINE_ARGS": " ".join([f"--{k} '{v}'" for k, v in machine_args.items()]), } def _launch() -> int: @@ -89,9 +95,10 @@ def _launch() -> int: # Log the exact command a human could copy-paste logger.info( - "$ docker run --rm --name %s -e ASV_ARGS='%s' --cpuset-cpus %s %s", + "$ docker run --rm --name %s -e ASV_ARGS='%s' -e ASV_MACHINE_ARGS='%s' --cpuset-cpus %s %s", container_name, env["ASV_ARGS"], + env["ASV_MACHINE_ARGS"], cpuset, image, ) @@ -126,6 +133,7 @@ def _launch() -> int: async def orchestrate( docker_image_names: Sequence[str], asv_args: str, + machine_args: dict[str, str], max_concurrency: int, n_cores: int, output_dir: Path, @@ -156,6 +164,7 @@ async def worker(idx: int, image: str) -> int: cores=cpuset_str, image=image, asv_args=asv_args, + machine_args=machine_args, output_dir=output_dir, ) status = "OK" if rc == 0 else f"FAIL({rc})" @@ -171,4 +180,4 @@ async def worker(idx: int, image: str) -> int: failures = sum(rc != 0 for rc in results) if failures: sys.exit(f"{failures} container(s) failed") - logger.info("All benchmarks finished successfully ✔") + logger.info("All benchmarks finished") diff --git a/src/datasmith/scrape/scrape_dashboards.py b/src/datasmith/scrape/scrape_dashboards.py index 75ce132..538b3af 100644 --- a/src/datasmith/scrape/scrape_dashboards.py +++ b/src/datasmith/scrape/scrape_dashboards.py @@ -77,7 
+77,7 @@ def make_benchmark_from_html(base_url: str, html_dir: str, force: bool) -> Bench for p in tqdm(param_sets, desc="machines"): # graph_dir = make_graph_dir(p, all_keys, quote=is_remote) for bench in tqdm(benchmarks, desc="benchmarks", leave=False): - bench_url = asv.graph.Graph.get_file_path(params=p, benchmark_name=f"{bench}.json") + bench_url = asv.graph.Graph.get_file_path(params=p, benchmark_name=f"{bench}.json") # pyright: ignore[reportAttributeAccessIssue] full_url = join_path(base_url, bench_url) local = dl_and_open(full_url, html_dir, base=base_url, force=force) if local is None: diff --git a/src/datasmith/scrape/utils.py b/src/datasmith/scrape/utils.py index e11f2e2..29b0eb6 100644 --- a/src/datasmith/scrape/utils.py +++ b/src/datasmith/scrape/utils.py @@ -96,12 +96,10 @@ def dl_and_open(url: str, dl_dir: str, base: str | None = None, force: bool = Fa is_http = parsed.scheme in ("http", "https") is_file = parsed.scheme == "file" - # ---- derive the URL-relative path, *without* any cleaning ---- rel_path = url[len(base) :].lstrip("/") if base and url.startswith(base) else parsed.path.lstrip("/") raw_parts = [unquote(p) for p in Path(rel_path).parts] raw_path = Path(dl_dir).joinpath(*raw_parts).resolve() - # ---- if that exact path already exists, use it as-is ---- if raw_path.exists(): local_path = raw_path else: @@ -115,10 +113,8 @@ def clean_component(comp: str) -> str: clean_parts = [clean_component(p) for p in raw_parts] local_path = Path(dl_dir).joinpath(*clean_parts).resolve() - # make sure the destination directory exists local_path.parent.mkdir(parents=True, exist_ok=True) - # ---- download/copy the file just like before ---- if is_http: if force or not local_path.exists(): try: From e90fdfbf5dd42a9dc1e66b27d5aea3928bc53ac4 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 16 Aug 2025 23:30:42 +0000 Subject: [PATCH 06/20] upgrade pkgs --- uv.lock | 2238 +++++++++++++++++++++++++++++++------------------------ 1 file changed, 1245 
insertions(+), 993 deletions(-) diff --git a/uv.lock b/uv.lock index 6edecbc..79cefbb 100644 --- a/uv.lock +++ b/uv.lock @@ -108,7 +108,7 @@ wheels = [ [[package]] name = "build" -version = "1.2.2.post1" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "os_name == 'nt'" }, @@ -117,9 +117,9 @@ dependencies = [ { name = "pyproject-hooks" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7", size = 46701, upload-time = "2024-10-06T17:22:25.251Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/1c/23e33405a7c9eac261dff640926b8b5adaed6a6eb3e1767d441ed611d0c0/build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397", size = 48544, upload-time = "2025-08-01T21:27:09.268Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5", size = 22950, upload-time = "2024-10-06T17:22:23.299Z" }, + { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382, upload-time = "2025-08-01T21:27:07.844Z" }, ] [[package]] @@ -133,11 +133,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.6.15" +version = "2025.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = 
"sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] [[package]] @@ -229,76 +229,77 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, 
upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", 
size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671, upload-time = "2025-05-02T08:34:12.696Z" }, - { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = "2025-05-02T08:34:19.081Z" }, - { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, - { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, - { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, 
upload-time = "2025-05-02T08:34:27.359Z" }, - { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, - { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, - { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, - { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436, upload-time = "2025-05-02T08:34:35.907Z" }, - { url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772, upload-time = "2025-05-02T08:34:37.935Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 
107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 
205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ca/9a0983dd5c8e9733565cf3db4df2b0a2e9a82659fd8aa2a868ac6e4a991f/charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05", size = 207520, upload-time = "2025-08-09T07:57:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/39/c6/99271dc37243a4f925b09090493fb96c9333d7992c6187f5cfe5312008d2/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e", size = 147307, upload-time = "2025-08-09T07:57:12.4Z" }, + { url = "https://files.pythonhosted.org/packages/e4/69/132eab043356bba06eb333cc2cc60c6340857d0a2e4ca6dc2b51312886b3/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99", size = 160448, upload-time = "2025-08-09T07:57:13.712Z" }, + { url = "https://files.pythonhosted.org/packages/04/9a/914d294daa4809c57667b77470533e65def9c0be1ef8b4c1183a99170e9d/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7", size = 157758, upload-time = "2025-08-09T07:57:14.979Z" }, + { url = "https://files.pythonhosted.org/packages/b0/a8/6f5bcf1bcf63cb45625f7c5cadca026121ff8a6c8a3256d8d8cd59302663/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7", size = 152487, upload-time = "2025-08-09T07:57:16.332Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/d3d0e9592f4e504f9dea08b8db270821c909558c353dc3b457ed2509f2fb/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19", size = 150054, upload-time = "2025-08-09T07:57:17.576Z" }, + { url = "https://files.pythonhosted.org/packages/20/30/5f64fe3981677fe63fa987b80e6c01042eb5ff653ff7cec1b7bd9268e54e/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312", size = 161703, upload-time = "2025-08-09T07:57:20.012Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/ef/dd08b2cac9284fd59e70f7d97382c33a3d0a926e45b15fc21b3308324ffd/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc", size = 159096, upload-time = "2025-08-09T07:57:21.329Z" }, + { url = "https://files.pythonhosted.org/packages/45/8c/dcef87cfc2b3f002a6478f38906f9040302c68aebe21468090e39cde1445/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34", size = 153852, upload-time = "2025-08-09T07:57:22.608Z" }, + { url = "https://files.pythonhosted.org/packages/63/86/9cbd533bd37883d467fcd1bd491b3547a3532d0fbb46de2b99feeebf185e/charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432", size = 99840, upload-time = "2025-08-09T07:57:23.883Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d6/7e805c8e5c46ff9729c49950acc4ee0aeb55efb8b3a56687658ad10c3216/charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca", size = 107438, upload-time = "2025-08-09T07:57:25.287Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] [[package]] @@ -344,14 +345,11 @@ wheels = [ [[package]] name = "comm" -version = "0.2.2" +version = "0.2.3" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, 
upload-time = "2024-03-12T16:53:41.133Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, + { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" }, ] [[package]] @@ -437,13 +435,10 @@ name = "contourpy" version = "1.3.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/66/54/eb9bfc647b19f2009dd5c7f5ec51c4e6ca831725f1aea7a993034f483147/contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54", size = 13466130, upload-time = "2025-04-15T17:47:53.79Z" } wheels = [ @@ -505,78 +500,185 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/68/7f46fb537958e87427d98a4074bcde4b67a70b04900cfc5ce29bc2f556c1/contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5", size = 221791, upload-time = "2025-04-15T17:45:24.794Z" }, ] +[[package]] +name = "contourpy" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", +] +dependencies = [ + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, + { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, + { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, 
upload-time = "2025-07-26T12:01:18.256Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, + { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, + { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" }, + { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, + { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, + { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, + { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, + { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, + { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, + { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, + { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, + { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, + { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, + { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, + { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, + { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, + { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, + { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, + { url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, + { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, + { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, + { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, + { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, + { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, + { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, + { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, + { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, + { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, + { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, +] + [[package]] name = "coverage" -version = "7.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/e0/98670a80884f64578f0c22cd70c5e81a6e07b08167721c7487b4d70a7ca0/coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec", size = 813650, upload-time = "2025-06-13T13:02:28.627Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/78/1c1c5ec58f16817c09cbacb39783c3655d54a221b6552f47ff5ac9297603/coverage-7.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca", size = 212028, upload-time = "2025-06-13T13:00:29.293Z" }, - { url = "https://files.pythonhosted.org/packages/98/db/e91b9076f3a888e3b4ad7972ea3842297a52cc52e73fd1e529856e473510/coverage-7.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509", size = 212420, upload-time = "2025-06-13T13:00:34.027Z" }, - { url = "https://files.pythonhosted.org/packages/0e/d0/2b3733412954576b0aea0a16c3b6b8fbe95eb975d8bfa10b07359ead4252/coverage-7.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf95981b126f23db63e9dbe4cf65bd71f9a6305696fa5e2262693bc4e2183f5b", size = 241529, upload-time = "2025-06-13T13:00:35.786Z" }, - { url = "https://files.pythonhosted.org/packages/b3/00/5e2e5ae2e750a872226a68e984d4d3f3563cb01d1afb449a17aa819bc2c4/coverage-7.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f05031cf21699785cd47cb7485f67df619e7bcdae38e0fde40d23d3d0210d3c3", size = 239403, upload-time = 
"2025-06-13T13:00:37.399Z" }, - { url = "https://files.pythonhosted.org/packages/37/3b/a2c27736035156b0a7c20683afe7df498480c0dfdf503b8c878a21b6d7fb/coverage-7.9.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4fbcab8764dc072cb651a4bcda4d11fb5658a1d8d68842a862a6610bd8cfa3", size = 240548, upload-time = "2025-06-13T13:00:39.647Z" }, - { url = "https://files.pythonhosted.org/packages/98/f5/13d5fc074c3c0e0dc80422d9535814abf190f1254d7c3451590dc4f8b18c/coverage-7.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16649a7330ec307942ed27d06ee7e7a38417144620bb3d6e9a18ded8a2d3e5", size = 240459, upload-time = "2025-06-13T13:00:40.934Z" }, - { url = "https://files.pythonhosted.org/packages/36/24/24b9676ea06102df824c4a56ffd13dc9da7904478db519efa877d16527d5/coverage-7.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cea0a27a89e6432705fffc178064503508e3c0184b4f061700e771a09de58187", size = 239128, upload-time = "2025-06-13T13:00:42.343Z" }, - { url = "https://files.pythonhosted.org/packages/be/05/242b7a7d491b369ac5fee7908a6e5ba42b3030450f3ad62c645b40c23e0e/coverage-7.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e980b53a959fa53b6f05343afbd1e6f44a23ed6c23c4b4c56c6662bbb40c82ce", size = 239402, upload-time = "2025-06-13T13:00:43.634Z" }, - { url = "https://files.pythonhosted.org/packages/73/e0/4de7f87192fa65c9c8fbaeb75507e124f82396b71de1797da5602898be32/coverage-7.9.1-cp310-cp310-win32.whl", hash = "sha256:70760b4c5560be6ca70d11f8988ee6542b003f982b32f83d5ac0b72476607b70", size = 214518, upload-time = "2025-06-13T13:00:45.622Z" }, - { url = "https://files.pythonhosted.org/packages/d5/ab/5e4e2fe458907d2a65fab62c773671cfc5ac704f1e7a9ddd91996f66e3c2/coverage-7.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a66e8f628b71f78c0e0342003d53b53101ba4e00ea8dabb799d9dba0abbbcebe", size = 215436, upload-time = "2025-06-13T13:00:47.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/34/fa69372a07d0903a78ac103422ad34db72281c9fc625eba94ac1185da66f/coverage-7.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95c765060e65c692da2d2f51a9499c5e9f5cf5453aeaf1420e3fc847cc060582", size = 212146, upload-time = "2025-06-13T13:00:48.496Z" }, - { url = "https://files.pythonhosted.org/packages/27/f0/da1894915d2767f093f081c42afeba18e760f12fdd7a2f4acbe00564d767/coverage-7.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba383dc6afd5ec5b7a0d0c23d38895db0e15bcba7fb0fa8901f245267ac30d86", size = 212536, upload-time = "2025-06-13T13:00:51.535Z" }, - { url = "https://files.pythonhosted.org/packages/10/d5/3fc33b06e41e390f88eef111226a24e4504d216ab8e5d1a7089aa5a3c87a/coverage-7.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae0383f13cbdcf1e5e7014489b0d71cc0106458878ccde52e8a12ced4298ed", size = 245092, upload-time = "2025-06-13T13:00:52.883Z" }, - { url = "https://files.pythonhosted.org/packages/0a/39/7aa901c14977aba637b78e95800edf77f29f5a380d29768c5b66f258305b/coverage-7.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69aa417a030bf11ec46149636314c24c8d60fadb12fc0ee8f10fda0d918c879d", size = 242806, upload-time = "2025-06-13T13:00:54.571Z" }, - { url = "https://files.pythonhosted.org/packages/43/fc/30e5cfeaf560b1fc1989227adedc11019ce4bb7cce59d65db34fe0c2d963/coverage-7.9.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a4be2a28656afe279b34d4f91c3e26eccf2f85500d4a4ff0b1f8b54bf807338", size = 244610, upload-time = "2025-06-13T13:00:56.932Z" }, - { url = "https://files.pythonhosted.org/packages/bf/15/cca62b13f39650bc87b2b92bb03bce7f0e79dd0bf2c7529e9fc7393e4d60/coverage-7.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:382e7ddd5289f140259b610e5f5c58f713d025cb2f66d0eb17e68d0a94278875", size = 244257, upload-time = "2025-06-13T13:00:58.545Z" 
}, - { url = "https://files.pythonhosted.org/packages/cd/1a/c0f2abe92c29e1464dbd0ff9d56cb6c88ae2b9e21becdb38bea31fcb2f6c/coverage-7.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e5532482344186c543c37bfad0ee6069e8ae4fc38d073b8bc836fc8f03c9e250", size = 242309, upload-time = "2025-06-13T13:00:59.836Z" }, - { url = "https://files.pythonhosted.org/packages/57/8d/c6fd70848bd9bf88fa90df2af5636589a8126d2170f3aade21ed53f2b67a/coverage-7.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a39d18b3f50cc121d0ce3838d32d58bd1d15dab89c910358ebefc3665712256c", size = 242898, upload-time = "2025-06-13T13:01:02.506Z" }, - { url = "https://files.pythonhosted.org/packages/c2/9e/6ca46c7bff4675f09a66fe2797cd1ad6a24f14c9c7c3b3ebe0470a6e30b8/coverage-7.9.1-cp311-cp311-win32.whl", hash = "sha256:dd24bd8d77c98557880def750782df77ab2b6885a18483dc8588792247174b32", size = 214561, upload-time = "2025-06-13T13:01:04.012Z" }, - { url = "https://files.pythonhosted.org/packages/a1/30/166978c6302010742dabcdc425fa0f938fa5a800908e39aff37a7a876a13/coverage-7.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b55ad10a35a21b8015eabddc9ba31eb590f54adc9cd39bcf09ff5349fd52125", size = 215493, upload-time = "2025-06-13T13:01:05.702Z" }, - { url = "https://files.pythonhosted.org/packages/60/07/a6d2342cd80a5be9f0eeab115bc5ebb3917b4a64c2953534273cf9bc7ae6/coverage-7.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ad935f0016be24c0e97fc8c40c465f9c4b85cbbe6eac48934c0dc4d2568321e", size = 213869, upload-time = "2025-06-13T13:01:09.345Z" }, - { url = "https://files.pythonhosted.org/packages/68/d9/7f66eb0a8f2fce222de7bdc2046ec41cb31fe33fb55a330037833fb88afc/coverage-7.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8de12b4b87c20de895f10567639c0797b621b22897b0af3ce4b4e204a743626", size = 212336, upload-time = "2025-06-13T13:01:10.909Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/20/e07cb920ef3addf20f052ee3d54906e57407b6aeee3227a9c91eea38a665/coverage-7.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5add197315a054e92cee1b5f686a2bcba60c4c3e66ee3de77ace6c867bdee7cb", size = 212571, upload-time = "2025-06-13T13:01:12.518Z" }, - { url = "https://files.pythonhosted.org/packages/78/f8/96f155de7e9e248ca9c8ff1a40a521d944ba48bec65352da9be2463745bf/coverage-7.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600a1d4106fe66f41e5d0136dfbc68fe7200a5cbe85610ddf094f8f22e1b0300", size = 246377, upload-time = "2025-06-13T13:01:14.87Z" }, - { url = "https://files.pythonhosted.org/packages/3e/cf/1d783bd05b7bca5c10ded5f946068909372e94615a4416afadfe3f63492d/coverage-7.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a876e4c3e5a2a1715a6608906aa5a2e0475b9c0f68343c2ada98110512ab1d8", size = 243394, upload-time = "2025-06-13T13:01:16.23Z" }, - { url = "https://files.pythonhosted.org/packages/02/dd/e7b20afd35b0a1abea09fb3998e1abc9f9bd953bee548f235aebd2b11401/coverage-7.9.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81f34346dd63010453922c8e628a52ea2d2ccd73cb2487f7700ac531b247c8a5", size = 245586, upload-time = "2025-06-13T13:01:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/4e/38/b30b0006fea9d617d1cb8e43b1bc9a96af11eff42b87eb8c716cf4d37469/coverage-7.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:888f8eee13f2377ce86d44f338968eedec3291876b0b8a7289247ba52cb984cd", size = 245396, upload-time = "2025-06-13T13:01:19.164Z" }, - { url = "https://files.pythonhosted.org/packages/31/e4/4d8ec1dc826e16791f3daf1b50943e8e7e1eb70e8efa7abb03936ff48418/coverage-7.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9969ef1e69b8c8e1e70d591f91bbc37fc9a3621e447525d1602801a24ceda898", size = 243577, upload-time = "2025-06-13T13:01:22.433Z" }, 
- { url = "https://files.pythonhosted.org/packages/25/f4/b0e96c5c38e6e40ef465c4bc7f138863e2909c00e54a331da335faf0d81a/coverage-7.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60c458224331ee3f1a5b472773e4a085cc27a86a0b48205409d364272d67140d", size = 244809, upload-time = "2025-06-13T13:01:24.143Z" }, - { url = "https://files.pythonhosted.org/packages/8a/65/27e0a1fa5e2e5079bdca4521be2f5dabf516f94e29a0defed35ac2382eb2/coverage-7.9.1-cp312-cp312-win32.whl", hash = "sha256:5f646a99a8c2b3ff4c6a6e081f78fad0dde275cd59f8f49dc4eab2e394332e74", size = 214724, upload-time = "2025-06-13T13:01:25.435Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a8/d5b128633fd1a5e0401a4160d02fa15986209a9e47717174f99dc2f7166d/coverage-7.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:30f445f85c353090b83e552dcbbdad3ec84c7967e108c3ae54556ca69955563e", size = 215535, upload-time = "2025-06-13T13:01:27.861Z" }, - { url = "https://files.pythonhosted.org/packages/a3/37/84bba9d2afabc3611f3e4325ee2c6a47cd449b580d4a606b240ce5a6f9bf/coverage-7.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:af41da5dca398d3474129c58cb2b106a5d93bbb196be0d307ac82311ca234342", size = 213904, upload-time = "2025-06-13T13:01:29.202Z" }, - { url = "https://files.pythonhosted.org/packages/d0/a7/a027970c991ca90f24e968999f7d509332daf6b8c3533d68633930aaebac/coverage-7.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:31324f18d5969feef7344a932c32428a2d1a3e50b15a6404e97cba1cc9b2c631", size = 212358, upload-time = "2025-06-13T13:01:30.909Z" }, - { url = "https://files.pythonhosted.org/packages/f2/48/6aaed3651ae83b231556750280682528fea8ac7f1232834573472d83e459/coverage-7.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c804506d624e8a20fb3108764c52e0eef664e29d21692afa375e0dd98dc384f", size = 212620, upload-time = "2025-06-13T13:01:32.256Z" }, - { url = 
"https://files.pythonhosted.org/packages/6c/2a/f4b613f3b44d8b9f144847c89151992b2b6b79cbc506dee89ad0c35f209d/coverage-7.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef64c27bc40189f36fcc50c3fb8f16ccda73b6a0b80d9bd6e6ce4cffcd810bbd", size = 245788, upload-time = "2025-06-13T13:01:33.948Z" }, - { url = "https://files.pythonhosted.org/packages/04/d2/de4fdc03af5e4e035ef420ed26a703c6ad3d7a07aff2e959eb84e3b19ca8/coverage-7.9.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4fe2348cc6ec372e25adec0219ee2334a68d2f5222e0cba9c0d613394e12d86", size = 243001, upload-time = "2025-06-13T13:01:35.285Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e8/eed18aa5583b0423ab7f04e34659e51101135c41cd1dcb33ac1d7013a6d6/coverage-7.9.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ed2186fe52fcc24d4561041979a0dec69adae7bce2ae8d1c49eace13e55c43", size = 244985, upload-time = "2025-06-13T13:01:36.712Z" }, - { url = "https://files.pythonhosted.org/packages/17/f8/ae9e5cce8885728c934eaa58ebfa8281d488ef2afa81c3dbc8ee9e6d80db/coverage-7.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25308bd3d00d5eedd5ae7d4357161f4df743e3c0240fa773ee1b0f75e6c7c0f1", size = 245152, upload-time = "2025-06-13T13:01:39.303Z" }, - { url = "https://files.pythonhosted.org/packages/5a/c8/272c01ae792bb3af9b30fac14d71d63371db227980682836ec388e2c57c0/coverage-7.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73e9439310f65d55a5a1e0564b48e34f5369bee943d72c88378f2d576f5a5751", size = 243123, upload-time = "2025-06-13T13:01:40.727Z" }, - { url = "https://files.pythonhosted.org/packages/8c/d0/2819a1e3086143c094ab446e3bdf07138527a7b88cb235c488e78150ba7a/coverage-7.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37ab6be0859141b53aa89412a82454b482c81cf750de4f29223d52268a86de67", size = 244506, upload-time = 
"2025-06-13T13:01:42.184Z" }, - { url = "https://files.pythonhosted.org/packages/8b/4e/9f6117b89152df7b6112f65c7a4ed1f2f5ec8e60c4be8f351d91e7acc848/coverage-7.9.1-cp313-cp313-win32.whl", hash = "sha256:64bdd969456e2d02a8b08aa047a92d269c7ac1f47e0c977675d550c9a0863643", size = 214766, upload-time = "2025-06-13T13:01:44.482Z" }, - { url = "https://files.pythonhosted.org/packages/27/0f/4b59f7c93b52c2c4ce7387c5a4e135e49891bb3b7408dcc98fe44033bbe0/coverage-7.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:be9e3f68ca9edb897c2184ad0eee815c635565dbe7a0e7e814dc1f7cbab92c0a", size = 215568, upload-time = "2025-06-13T13:01:45.772Z" }, - { url = "https://files.pythonhosted.org/packages/09/1e/9679826336f8c67b9c39a359352882b24a8a7aee48d4c9cad08d38d7510f/coverage-7.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:1c503289ffef1d5105d91bbb4d62cbe4b14bec4d13ca225f9c73cde9bb46207d", size = 213939, upload-time = "2025-06-13T13:01:47.087Z" }, - { url = "https://files.pythonhosted.org/packages/bb/5b/5c6b4e7a407359a2e3b27bf9c8a7b658127975def62077d441b93a30dbe8/coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0", size = 213079, upload-time = "2025-06-13T13:01:48.554Z" }, - { url = "https://files.pythonhosted.org/packages/a2/22/1e2e07279fd2fd97ae26c01cc2186e2258850e9ec125ae87184225662e89/coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d", size = 213299, upload-time = "2025-06-13T13:01:49.997Z" }, - { url = "https://files.pythonhosted.org/packages/14/c0/4c5125a4b69d66b8c85986d3321520f628756cf524af810baab0790c7647/coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f", size = 256535, upload-time = "2025-06-13T13:01:51.314Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/8b/e36a04889dda9960be4263e95e777e7b46f1bb4fc32202612c130a20c4da/coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029", size = 252756, upload-time = "2025-06-13T13:01:54.403Z" }, - { url = "https://files.pythonhosted.org/packages/98/82/be04eff8083a09a4622ecd0e1f31a2c563dbea3ed848069e7b0445043a70/coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece", size = 254912, upload-time = "2025-06-13T13:01:56.769Z" }, - { url = "https://files.pythonhosted.org/packages/0f/25/c26610a2c7f018508a5ab958e5b3202d900422cf7cdca7670b6b8ca4e8df/coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683", size = 256144, upload-time = "2025-06-13T13:01:58.19Z" }, - { url = "https://files.pythonhosted.org/packages/c5/8b/fb9425c4684066c79e863f1e6e7ecebb49e3a64d9f7f7860ef1688c56f4a/coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f", size = 254257, upload-time = "2025-06-13T13:01:59.645Z" }, - { url = "https://files.pythonhosted.org/packages/93/df/27b882f54157fc1131e0e215b0da3b8d608d9b8ef79a045280118a8f98fe/coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10", size = 255094, upload-time = "2025-06-13T13:02:01.37Z" }, - { url = "https://files.pythonhosted.org/packages/41/5f/cad1c3dbed8b3ee9e16fa832afe365b4e3eeab1fb6edb65ebbf745eabc92/coverage-7.9.1-cp313-cp313t-win32.whl", hash = "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363", size = 215437, upload-time = "2025-06-13T13:02:02.905Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/4d/fad293bf081c0e43331ca745ff63673badc20afea2104b431cdd8c278b4c/coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7", size = 216605, upload-time = "2025-06-13T13:02:05.638Z" }, - { url = "https://files.pythonhosted.org/packages/1f/56/4ee027d5965fc7fc126d7ec1187529cc30cc7d740846e1ecb5e92d31b224/coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c", size = 214392, upload-time = "2025-06-13T13:02:07.642Z" }, - { url = "https://files.pythonhosted.org/packages/a5/d6/c41dd9b02bf16ec001aaf1cbef665537606899a3db1094e78f5ae17540ca/coverage-7.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f424507f57878e424d9a95dc4ead3fbdd72fd201e404e861e465f28ea469951", size = 212029, upload-time = "2025-06-13T13:02:09.058Z" }, - { url = "https://files.pythonhosted.org/packages/f8/c0/40420d81d731f84c3916dcdf0506b3e6c6570817bff2576b83f780914ae6/coverage-7.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:535fde4001b2783ac80865d90e7cc7798b6b126f4cd8a8c54acfe76804e54e58", size = 212407, upload-time = "2025-06-13T13:02:11.151Z" }, - { url = "https://files.pythonhosted.org/packages/9b/87/f0db7d62d0e09f14d6d2f6ae8c7274a2f09edf74895a34b412a0601e375a/coverage-7.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02532fd3290bb8fa6bec876520842428e2a6ed6c27014eca81b031c2d30e3f71", size = 241160, upload-time = "2025-06-13T13:02:12.864Z" }, - { url = "https://files.pythonhosted.org/packages/a9/b7/3337c064f058a5d7696c4867159651a5b5fb01a5202bcf37362f0c51400e/coverage-7.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56f5eb308b17bca3bbff810f55ee26d51926d9f89ba92707ee41d3c061257e55", size = 239027, upload-time = "2025-06-13T13:02:14.294Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/a9/5898a283f66d1bd413c32c2e0e05408196fd4f37e206e2b06c6e0c626e0e/coverage-7.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfa447506c1a52271f1b0de3f42ea0fa14676052549095e378d5bff1c505ff7b", size = 240145, upload-time = "2025-06-13T13:02:15.745Z" }, - { url = "https://files.pythonhosted.org/packages/e0/33/d96e3350078a3c423c549cb5b2ba970de24c5257954d3e4066e2b2152d30/coverage-7.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9ca8e220006966b4a7b68e8984a6aee645a0384b0769e829ba60281fe61ec4f7", size = 239871, upload-time = "2025-06-13T13:02:17.344Z" }, - { url = "https://files.pythonhosted.org/packages/1d/6e/6fb946072455f71a820cac144d49d11747a0f1a21038060a68d2d0200499/coverage-7.9.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49f1d0788ba5b7ba65933f3a18864117c6506619f5ca80326b478f72acf3f385", size = 238122, upload-time = "2025-06-13T13:02:18.849Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5c/bc43f25c8586840ce25a796a8111acf6a2b5f0909ba89a10d41ccff3920d/coverage-7.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68cd53aec6f45b8e4724c0950ce86eacb775c6be01ce6e3669fe4f3a21e768ed", size = 239058, upload-time = "2025-06-13T13:02:21.423Z" }, - { url = "https://files.pythonhosted.org/packages/11/d8/ce2007418dd7fd00ff8c8b898bb150bb4bac2d6a86df05d7b88a07ff595f/coverage-7.9.1-cp39-cp39-win32.whl", hash = "sha256:95335095b6c7b1cc14c3f3f17d5452ce677e8490d101698562b2ffcacc304c8d", size = 214532, upload-time = "2025-06-13T13:02:22.857Z" }, - { url = "https://files.pythonhosted.org/packages/20/21/334e76fa246e92e6d69cab217f7c8a70ae0cc8f01438bd0544103f29528e/coverage-7.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e1b5191d1648acc439b24721caab2fd0c86679d8549ed2c84d5a7ec1bedcc244", size = 215439, upload-time = "2025-06-13T13:02:24.268Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/e5/c723545c3fd3204ebde3b4cc4b927dce709d3b6dc577754bb57f63ca4a4a/coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514", size = 204009, upload-time = "2025-06-13T13:02:25.787Z" }, - { url = "https://files.pythonhosted.org/packages/08/b8/7ddd1e8ba9701dea08ce22029917140e6f66a859427406579fd8d0ca7274/coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c", size = 204000, upload-time = "2025-06-13T13:02:27.173Z" }, +version = "7.10.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/2c/253cc41cd0f40b84c1c34c5363e0407d73d4a1cae005fed6db3b823175bd/coverage-7.10.3.tar.gz", hash = "sha256:812ba9250532e4a823b070b0420a36499859542335af3dca8f47fc6aa1a05619", size = 822936, upload-time = "2025-08-10T21:27:39.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/44/e14576c34b37764c821866909788ff7463228907ab82bae188dab2b421f1/coverage-7.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53808194afdf948c462215e9403cca27a81cf150d2f9b386aee4dab614ae2ffe", size = 215964, upload-time = "2025-08-10T21:25:22.828Z" }, + { url = "https://files.pythonhosted.org/packages/e6/15/f4f92d9b83100903efe06c9396ee8d8bdba133399d37c186fc5b16d03a87/coverage-7.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f4d1b837d1abf72187a61645dbf799e0d7705aa9232924946e1f57eb09a3bf00", size = 216361, upload-time = "2025-08-10T21:25:25.603Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/c92e8cd5e89acc41cfc026dfb7acedf89661ce2ea1ee0ee13aacb6b2c20c/coverage-7.10.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2a90dd4505d3cc68b847ab10c5ee81822a968b5191664e8a0801778fa60459fa", size = 243115, upload-time = "2025-08-10T21:25:27.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/53/c1d8c2778823b1d95ca81701bb8f42c87dc341a2f170acdf716567523490/coverage-7.10.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d52989685ff5bf909c430e6d7f6550937bc6d6f3e6ecb303c97a86100efd4596", size = 244927, upload-time = "2025-08-10T21:25:28.77Z" }, + { url = "https://files.pythonhosted.org/packages/79/41/1e115fd809031f432b4ff8e2ca19999fb6196ab95c35ae7ad5e07c001130/coverage-7.10.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdb558a1d97345bde3a9f4d3e8d11c9e5611f748646e9bb61d7d612a796671b5", size = 246784, upload-time = "2025-08-10T21:25:30.195Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b2/0eba9bdf8f1b327ae2713c74d4b7aa85451bb70622ab4e7b8c000936677c/coverage-7.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c9e6331a8f09cb1fc8bda032752af03c366870b48cce908875ba2620d20d0ad4", size = 244828, upload-time = "2025-08-10T21:25:31.785Z" }, + { url = "https://files.pythonhosted.org/packages/1f/cc/74c56b6bf71f2a53b9aa3df8bc27163994e0861c065b4fe3a8ac290bed35/coverage-7.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:992f48bf35b720e174e7fae916d943599f1a66501a2710d06c5f8104e0756ee1", size = 242844, upload-time = "2025-08-10T21:25:33.37Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/ac183fbe19ac5596c223cb47af5737f4437e7566100b7e46cc29b66695a5/coverage-7.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c5595fc4ad6a39312c786ec3326d7322d0cf10e3ac6a6df70809910026d67cfb", size = 243721, upload-time = "2025-08-10T21:25:34.939Z" }, + { url = "https://files.pythonhosted.org/packages/57/96/cb90da3b5a885af48f531905234a1e7376acfc1334242183d23154a1c285/coverage-7.10.3-cp310-cp310-win32.whl", hash = "sha256:9e92fa1f2bd5a57df9d00cf9ce1eb4ef6fccca4ceabec1c984837de55329db34", size = 218481, upload-time = "2025-08-10T21:25:36.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/67/1ba4c7d75745c4819c54a85766e0a88cc2bff79e1760c8a2debc34106dc2/coverage-7.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b96524d6e4a3ce6a75c56bb15dbd08023b0ae2289c254e15b9fbdddf0c577416", size = 219382, upload-time = "2025-08-10T21:25:38.267Z" }, + { url = "https://files.pythonhosted.org/packages/87/04/810e506d7a19889c244d35199cbf3239a2f952b55580aa42ca4287409424/coverage-7.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2ff2e2afdf0d51b9b8301e542d9c21a8d084fd23d4c8ea2b3a1b3c96f5f7397", size = 216075, upload-time = "2025-08-10T21:25:39.891Z" }, + { url = "https://files.pythonhosted.org/packages/2e/50/6b3fbab034717b4af3060bdaea6b13dfdc6b1fad44b5082e2a95cd378a9a/coverage-7.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18ecc5d1b9a8c570f6c9b808fa9a2b16836b3dd5414a6d467ae942208b095f85", size = 216476, upload-time = "2025-08-10T21:25:41.137Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/4368c624c1ed92659812b63afc76c492be7867ac8e64b7190b88bb26d43c/coverage-7.10.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1af4461b25fe92889590d438905e1fc79a95680ec2a1ff69a591bb3fdb6c7157", size = 246865, upload-time = "2025-08-10T21:25:42.408Z" }, + { url = "https://files.pythonhosted.org/packages/34/12/5608f76070939395c17053bf16e81fd6c06cf362a537ea9d07e281013a27/coverage-7.10.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3966bc9a76b09a40dc6063c8b10375e827ea5dfcaffae402dd65953bef4cba54", size = 248800, upload-time = "2025-08-10T21:25:44.098Z" }, + { url = "https://files.pythonhosted.org/packages/ce/52/7cc90c448a0ad724283cbcdfd66b8d23a598861a6a22ac2b7b8696491798/coverage-7.10.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:205a95b87ef4eb303b7bc5118b47b6b6604a644bcbdb33c336a41cfc0a08c06a", size = 250904, upload-time = "2025-08-10T21:25:45.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/70/9967b847063c1c393b4f4d6daab1131558ebb6b51f01e7df7150aa99f11d/coverage-7.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b3801b79fb2ad61e3c7e2554bab754fc5f105626056980a2b9cf3aef4f13f84", size = 248597, upload-time = "2025-08-10T21:25:47.059Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fe/263307ce6878b9ed4865af42e784b42bb82d066bcf10f68defa42931c2c7/coverage-7.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0dc69c60224cda33d384572da945759756e3f06b9cdac27f302f53961e63160", size = 246647, upload-time = "2025-08-10T21:25:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/8e/27/d27af83ad162eba62c4eb7844a1de6cf7d9f6b185df50b0a3514a6f80ddd/coverage-7.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a83d4f134bab2c7ff758e6bb1541dd72b54ba295ced6a63d93efc2e20cb9b124", size = 247290, upload-time = "2025-08-10T21:25:49.945Z" }, + { url = "https://files.pythonhosted.org/packages/28/83/904ff27e15467a5622dbe9ad2ed5831b4a616a62570ec5924d06477dff5a/coverage-7.10.3-cp311-cp311-win32.whl", hash = "sha256:54e409dd64e5302b2a8fdf44ec1c26f47abd1f45a2dcf67bd161873ee05a59b8", size = 218521, upload-time = "2025-08-10T21:25:51.208Z" }, + { url = "https://files.pythonhosted.org/packages/b8/29/bc717b8902faaccf0ca486185f0dcab4778561a529dde51cb157acaafa16/coverage-7.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:30c601610a9b23807c5e9e2e442054b795953ab85d525c3de1b1b27cebeb2117", size = 219412, upload-time = "2025-08-10T21:25:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/7b/7a/5a1a7028c11bb589268c656c6b3f2bbf06e0aced31bbdf7a4e94e8442cc0/coverage-7.10.3-cp311-cp311-win_arm64.whl", hash = "sha256:dabe662312a97958e932dee056f2659051d822552c0b866823e8ba1c2fe64770", size = 218091, upload-time = "2025-08-10T21:25:54.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/62/13c0b66e966c43d7aa64dadc8cd2afa1f5a2bf9bb863bdabc21fb94e8b63/coverage-7.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:449c1e2d3a84d18bd204258a897a87bc57380072eb2aded6a5b5226046207b42", size = 216262, upload-time = "2025-08-10T21:25:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/59fdf79be7ac2f0206fc739032f482cfd3f66b18f5248108ff192741beae/coverage-7.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d4f9ce50b9261ad196dc2b2e9f1fbbee21651b54c3097a25ad783679fd18294", size = 216496, upload-time = "2025-08-10T21:25:56.759Z" }, + { url = "https://files.pythonhosted.org/packages/34/b1/bc83788ba31bde6a0c02eb96bbc14b2d1eb083ee073beda18753fa2c4c66/coverage-7.10.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4dd4564207b160d0d45c36a10bc0a3d12563028e8b48cd6459ea322302a156d7", size = 247989, upload-time = "2025-08-10T21:25:58.067Z" }, + { url = "https://files.pythonhosted.org/packages/0c/29/f8bdf88357956c844bd872e87cb16748a37234f7f48c721dc7e981145eb7/coverage-7.10.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5ca3c9530ee072b7cb6a6ea7b640bcdff0ad3b334ae9687e521e59f79b1d0437", size = 250738, upload-time = "2025-08-10T21:25:59.406Z" }, + { url = "https://files.pythonhosted.org/packages/ae/df/6396301d332b71e42bbe624670af9376f63f73a455cc24723656afa95796/coverage-7.10.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b6df359e59fa243c9925ae6507e27f29c46698359f45e568fd51b9315dbbe587", size = 251868, upload-time = "2025-08-10T21:26:00.65Z" }, + { url = "https://files.pythonhosted.org/packages/91/21/d760b2df6139b6ef62c9cc03afb9bcdf7d6e36ed4d078baacffa618b4c1c/coverage-7.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a181e4c2c896c2ff64c6312db3bda38e9ade2e1aa67f86a5628ae85873786cea", size = 249790, upload-time = "2025-08-10T21:26:02.009Z" }, + { url 
= "https://files.pythonhosted.org/packages/69/91/5dcaa134568202397fa4023d7066d4318dc852b53b428052cd914faa05e1/coverage-7.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a374d4e923814e8b72b205ef6b3d3a647bb50e66f3558582eda074c976923613", size = 247907, upload-time = "2025-08-10T21:26:03.757Z" }, + { url = "https://files.pythonhosted.org/packages/38/ed/70c0e871cdfef75f27faceada461206c1cc2510c151e1ef8d60a6fedda39/coverage-7.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:daeefff05993e5e8c6e7499a8508e7bd94502b6b9a9159c84fd1fe6bce3151cb", size = 249344, upload-time = "2025-08-10T21:26:05.11Z" }, + { url = "https://files.pythonhosted.org/packages/5f/55/c8a273ed503cedc07f8a00dcd843daf28e849f0972e4c6be4c027f418ad6/coverage-7.10.3-cp312-cp312-win32.whl", hash = "sha256:187ecdcac21f9636d570e419773df7bd2fda2e7fa040f812e7f95d0bddf5f79a", size = 218693, upload-time = "2025-08-10T21:26:06.534Z" }, + { url = "https://files.pythonhosted.org/packages/94/58/dd3cfb2473b85be0b6eb8c5b6d80b6fc3f8f23611e69ef745cef8cf8bad5/coverage-7.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:4a50ad2524ee7e4c2a95e60d2b0b83283bdfc745fe82359d567e4f15d3823eb5", size = 219501, upload-time = "2025-08-10T21:26:08.195Z" }, + { url = "https://files.pythonhosted.org/packages/56/af/7cbcbf23d46de6f24246e3f76b30df099d05636b30c53c158a196f7da3ad/coverage-7.10.3-cp312-cp312-win_arm64.whl", hash = "sha256:c112f04e075d3495fa3ed2200f71317da99608cbb2e9345bdb6de8819fc30571", size = 218135, upload-time = "2025-08-10T21:26:09.584Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/239e4de9cc149c80e9cc359fab60592365b8c4cbfcad58b8a939d18c6898/coverage-7.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b99e87304ffe0eb97c5308447328a584258951853807afdc58b16143a530518a", size = 216298, upload-time = "2025-08-10T21:26:10.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/da/28717da68f8ba68f14b9f558aaa8f3e39ada8b9a1ae4f4977c8f98b286d5/coverage-7.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4af09c7574d09afbc1ea7da9dcea23665c01f3bc1b1feb061dac135f98ffc53a", size = 216546, upload-time = "2025-08-10T21:26:12.616Z" }, + { url = "https://files.pythonhosted.org/packages/de/bb/e1ade16b9e3f2d6c323faeb6bee8e6c23f3a72760a5d9af102ef56a656cb/coverage-7.10.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:488e9b50dc5d2aa9521053cfa706209e5acf5289e81edc28291a24f4e4488f46", size = 247538, upload-time = "2025-08-10T21:26:14.455Z" }, + { url = "https://files.pythonhosted.org/packages/ea/2f/6ae1db51dc34db499bfe340e89f79a63bd115fc32513a7bacdf17d33cd86/coverage-7.10.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:913ceddb4289cbba3a310704a424e3fb7aac2bc0c3a23ea473193cb290cf17d4", size = 250141, upload-time = "2025-08-10T21:26:15.787Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ed/33efd8819895b10c66348bf26f011dd621e804866c996ea6893d682218df/coverage-7.10.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b1f91cbc78c7112ab84ed2a8defbccd90f888fcae40a97ddd6466b0bec6ae8a", size = 251415, upload-time = "2025-08-10T21:26:17.535Z" }, + { url = "https://files.pythonhosted.org/packages/26/04/cb83826f313d07dc743359c9914d9bc460e0798da9a0e38b4f4fabc207ed/coverage-7.10.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0bac054d45af7cd938834b43a9878b36ea92781bcb009eab040a5b09e9927e3", size = 249575, upload-time = "2025-08-10T21:26:18.921Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fd/ae963c7a8e9581c20fa4355ab8940ca272554d8102e872dbb932a644e410/coverage-7.10.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fe72cbdd12d9e0f4aca873fa6d755e103888a7f9085e4a62d282d9d5b9f7928c", size = 247466, upload-time = "2025-08-10T21:26:20.263Z" }, + { url 
= "https://files.pythonhosted.org/packages/99/e8/b68d1487c6af370b8d5ef223c6d7e250d952c3acfbfcdbf1a773aa0da9d2/coverage-7.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c1e2e927ab3eadd7c244023927d646e4c15c65bb2ac7ae3c3e9537c013700d21", size = 249084, upload-time = "2025-08-10T21:26:21.638Z" }, + { url = "https://files.pythonhosted.org/packages/66/4d/a0bcb561645c2c1e21758d8200443669d6560d2a2fb03955291110212ec4/coverage-7.10.3-cp313-cp313-win32.whl", hash = "sha256:24d0c13de473b04920ddd6e5da3c08831b1170b8f3b17461d7429b61cad59ae0", size = 218735, upload-time = "2025-08-10T21:26:23.009Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c3/78b4adddbc0feb3b223f62761e5f9b4c5a758037aaf76e0a5845e9e35e48/coverage-7.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:3564aae76bce4b96e2345cf53b4c87e938c4985424a9be6a66ee902626edec4c", size = 219531, upload-time = "2025-08-10T21:26:24.474Z" }, + { url = "https://files.pythonhosted.org/packages/70/1b/1229c0b2a527fa5390db58d164aa896d513a1fbb85a1b6b6676846f00552/coverage-7.10.3-cp313-cp313-win_arm64.whl", hash = "sha256:f35580f19f297455f44afcd773c9c7a058e52eb6eb170aa31222e635f2e38b87", size = 218162, upload-time = "2025-08-10T21:26:25.847Z" }, + { url = "https://files.pythonhosted.org/packages/fc/26/1c1f450e15a3bf3eaecf053ff64538a2612a23f05b21d79ce03be9ff5903/coverage-7.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07009152f497a0464ffdf2634586787aea0e69ddd023eafb23fc38267db94b84", size = 217003, upload-time = "2025-08-10T21:26:27.231Z" }, + { url = "https://files.pythonhosted.org/packages/29/96/4b40036181d8c2948454b458750960956a3c4785f26a3c29418bbbee1666/coverage-7.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd2ba5f0c7e7e8cc418be2f0c14c4d9e3f08b8fb8e4c0f83c2fe87d03eb655e", size = 217238, upload-time = "2025-08-10T21:26:28.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/23/8dfc52e95da20957293fb94d97397a100e63095ec1e0ef5c09dd8c6f591a/coverage-7.10.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1ae22b97003c74186e034a93e4f946c75fad8c0ce8d92fbbc168b5e15ee2841f", size = 258561, upload-time = "2025-08-10T21:26:30.475Z" }, + { url = "https://files.pythonhosted.org/packages/59/95/00e7fcbeda3f632232f4c07dde226afe3511a7781a000aa67798feadc535/coverage-7.10.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eb329f1046888a36b1dc35504d3029e1dd5afe2196d94315d18c45ee380f67d5", size = 260735, upload-time = "2025-08-10T21:26:32.333Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4c/f4666cbc4571804ba2a65b078ff0de600b0b577dc245389e0bc9b69ae7ca/coverage-7.10.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce01048199a91f07f96ca3074b0c14021f4fe7ffd29a3e6a188ac60a5c3a4af8", size = 262960, upload-time = "2025-08-10T21:26:33.701Z" }, + { url = "https://files.pythonhosted.org/packages/c1/a5/8a9e8a7b12a290ed98b60f73d1d3e5e9ced75a4c94a0d1a671ce3ddfff2a/coverage-7.10.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08b989a06eb9dfacf96d42b7fb4c9a22bafa370d245dc22fa839f2168c6f9fa1", size = 260515, upload-time = "2025-08-10T21:26:35.16Z" }, + { url = "https://files.pythonhosted.org/packages/86/11/bb59f7f33b2cac0c5b17db0d9d0abba9c90d9eda51a6e727b43bd5fce4ae/coverage-7.10.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:669fe0d4e69c575c52148511029b722ba8d26e8a3129840c2ce0522e1452b256", size = 258278, upload-time = "2025-08-10T21:26:36.539Z" }, + { url = "https://files.pythonhosted.org/packages/cc/22/3646f8903743c07b3e53fded0700fed06c580a980482f04bf9536657ac17/coverage-7.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3262d19092771c83f3413831d9904b1ccc5f98da5de4ffa4ad67f5b20c7aaf7b", size = 259408, upload-time = "2025-08-10T21:26:37.954Z" }, 
+ { url = "https://files.pythonhosted.org/packages/d2/5c/6375e9d905da22ddea41cd85c30994b8b6f6c02e44e4c5744b76d16b026f/coverage-7.10.3-cp313-cp313t-win32.whl", hash = "sha256:cc0ee4b2ccd42cab7ee6be46d8a67d230cb33a0a7cd47a58b587a7063b6c6b0e", size = 219396, upload-time = "2025-08-10T21:26:39.426Z" }, + { url = "https://files.pythonhosted.org/packages/33/3b/7da37fd14412b8c8b6e73c3e7458fef6b1b05a37f990a9776f88e7740c89/coverage-7.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:03db599f213341e2960430984e04cf35fb179724e052a3ee627a068653cf4a7c", size = 220458, upload-time = "2025-08-10T21:26:40.905Z" }, + { url = "https://files.pythonhosted.org/packages/28/cc/59a9a70f17edab513c844ee7a5c63cf1057041a84cc725b46a51c6f8301b/coverage-7.10.3-cp313-cp313t-win_arm64.whl", hash = "sha256:46eae7893ba65f53c71284585a262f083ef71594f05ec5c85baf79c402369098", size = 218722, upload-time = "2025-08-10T21:26:42.362Z" }, + { url = "https://files.pythonhosted.org/packages/2d/84/bb773b51a06edbf1231b47dc810a23851f2796e913b335a0fa364773b842/coverage-7.10.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:bce8b8180912914032785850d8f3aacb25ec1810f5f54afc4a8b114e7a9b55de", size = 216280, upload-time = "2025-08-10T21:26:44.132Z" }, + { url = "https://files.pythonhosted.org/packages/92/a8/4d8ca9c111d09865f18d56facff64d5fa076a5593c290bd1cfc5dceb8dba/coverage-7.10.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07790b4b37d56608536f7c1079bd1aa511567ac2966d33d5cec9cf520c50a7c8", size = 216557, upload-time = "2025-08-10T21:26:45.598Z" }, + { url = "https://files.pythonhosted.org/packages/fe/b2/eb668bfc5060194bc5e1ccd6f664e8e045881cfee66c42a2aa6e6c5b26e8/coverage-7.10.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e79367ef2cd9166acedcbf136a458dfe9a4a2dd4d1ee95738fb2ee581c56f667", size = 247598, upload-time = "2025-08-10T21:26:47.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/b0/9faa4ac62c8822219dd83e5d0e73876398af17d7305968aed8d1606d1830/coverage-7.10.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:419d2a0f769f26cb1d05e9ccbc5eab4cb5d70231604d47150867c07822acbdf4", size = 250131, upload-time = "2025-08-10T21:26:48.65Z" }, + { url = "https://files.pythonhosted.org/packages/4e/90/203537e310844d4bf1bdcfab89c1e05c25025c06d8489b9e6f937ad1a9e2/coverage-7.10.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee221cf244757cdc2ac882e3062ab414b8464ad9c884c21e878517ea64b3fa26", size = 251485, upload-time = "2025-08-10T21:26:50.368Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b2/9d894b26bc53c70a1fe503d62240ce6564256d6d35600bdb86b80e516e7d/coverage-7.10.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c2079d8cdd6f7373d628e14b3357f24d1db02c9dc22e6a007418ca7a2be0435a", size = 249488, upload-time = "2025-08-10T21:26:52.045Z" }, + { url = "https://files.pythonhosted.org/packages/b4/28/af167dbac5281ba6c55c933a0ca6675d68347d5aee39cacc14d44150b922/coverage-7.10.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:bd8df1f83c0703fa3ca781b02d36f9ec67ad9cb725b18d486405924f5e4270bd", size = 247419, upload-time = "2025-08-10T21:26:53.533Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1c/9a4ddc9f0dcb150d4cd619e1c4bb39bcf694c6129220bdd1e5895d694dda/coverage-7.10.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6b4e25e0fa335c8aa26e42a52053f3786a61cc7622b4d54ae2dad994aa754fec", size = 248917, upload-time = "2025-08-10T21:26:55.11Z" }, + { url = "https://files.pythonhosted.org/packages/92/27/c6a60c7cbe10dbcdcd7fc9ee89d531dc04ea4c073800279bb269954c5a9f/coverage-7.10.3-cp314-cp314-win32.whl", hash = "sha256:d7c3d02c2866deb217dce664c71787f4b25420ea3eaf87056f44fb364a3528f5", size = 218999, upload-time = "2025-08-10T21:26:56.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/09/a94c1369964ab31273576615d55e7d14619a1c47a662ed3e2a2fe4dee7d4/coverage-7.10.3-cp314-cp314-win_amd64.whl", hash = "sha256:9c8916d44d9e0fe6cdb2227dc6b0edd8bc6c8ef13438bbbf69af7482d9bb9833", size = 219801, upload-time = "2025-08-10T21:26:58.207Z" }, + { url = "https://files.pythonhosted.org/packages/23/59/f5cd2a80f401c01cf0f3add64a7b791b7d53fd6090a4e3e9ea52691cf3c4/coverage-7.10.3-cp314-cp314-win_arm64.whl", hash = "sha256:1007d6a2b3cf197c57105cc1ba390d9ff7f0bee215ced4dea530181e49c65ab4", size = 218381, upload-time = "2025-08-10T21:26:59.707Z" }, + { url = "https://files.pythonhosted.org/packages/73/3d/89d65baf1ea39e148ee989de6da601469ba93c1d905b17dfb0b83bd39c96/coverage-7.10.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ebc8791d346410d096818788877d675ca55c91db87d60e8f477bd41c6970ffc6", size = 217019, upload-time = "2025-08-10T21:27:01.242Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7d/d9850230cd9c999ce3a1e600f85c2fff61a81c301334d7a1faa1a5ba19c8/coverage-7.10.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f4e4d8e75f6fd3c6940ebeed29e3d9d632e1f18f6fb65d33086d99d4d073241", size = 217237, upload-time = "2025-08-10T21:27:03.442Z" }, + { url = "https://files.pythonhosted.org/packages/36/51/b87002d417202ab27f4a1cd6bd34ee3b78f51b3ddbef51639099661da991/coverage-7.10.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:24581ed69f132b6225a31b0228ae4885731cddc966f8a33fe5987288bdbbbd5e", size = 258735, upload-time = "2025-08-10T21:27:05.124Z" }, + { url = "https://files.pythonhosted.org/packages/1c/02/1f8612bfcb46fc7ca64a353fff1cd4ed932bb6e0b4e0bb88b699c16794b8/coverage-7.10.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec151569ddfccbf71bac8c422dce15e176167385a00cd86e887f9a80035ce8a5", size = 260901, upload-time = "2025-08-10T21:27:06.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/3a/fe39e624ddcb2373908bd922756384bb70ac1c5009b0d1674eb326a3e428/coverage-7.10.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2ae8e7c56290b908ee817200c0b65929b8050bc28530b131fe7c6dfee3e7d86b", size = 263157, upload-time = "2025-08-10T21:27:08.398Z" }, + { url = "https://files.pythonhosted.org/packages/5e/89/496b6d5a10fa0d0691a633bb2b2bcf4f38f0bdfcbde21ad9e32d1af328ed/coverage-7.10.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb742309766d7e48e9eb4dc34bc95a424707bc6140c0e7d9726e794f11b92a0", size = 260597, upload-time = "2025-08-10T21:27:10.237Z" }, + { url = "https://files.pythonhosted.org/packages/b6/a6/8b5bf6a9e8c6aaeb47d5fe9687014148efc05c3588110246d5fdeef9b492/coverage-7.10.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c65e2a5b32fbe1e499f1036efa6eb9cb4ea2bf6f7168d0e7a5852f3024f471b1", size = 258353, upload-time = "2025-08-10T21:27:11.773Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6d/ad131be74f8afd28150a07565dfbdc86592fd61d97e2dc83383d9af219f0/coverage-7.10.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d48d2cb07d50f12f4f18d2bb75d9d19e3506c26d96fffabf56d22936e5ed8f7c", size = 259504, upload-time = "2025-08-10T21:27:13.254Z" }, + { url = "https://files.pythonhosted.org/packages/ec/30/fc9b5097092758cba3375a8cc4ff61774f8cd733bcfb6c9d21a60077a8d8/coverage-7.10.3-cp314-cp314t-win32.whl", hash = "sha256:dec0d9bc15ee305e09fe2cd1911d3f0371262d3cfdae05d79515d8cb712b4869", size = 219782, upload-time = "2025-08-10T21:27:14.736Z" }, + { url = "https://files.pythonhosted.org/packages/72/9b/27fbf79451b1fac15c4bda6ec6e9deae27cf7c0648c1305aa21a3454f5c4/coverage-7.10.3-cp314-cp314t-win_amd64.whl", hash = "sha256:424ea93a323aa0f7f01174308ea78bde885c3089ec1bef7143a6d93c3e24ef64", size = 220898, upload-time = "2025-08-10T21:27:16.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/cf/a32bbf92869cbf0b7c8b84325327bfc718ad4b6d2c63374fef3d58e39306/coverage-7.10.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f5983c132a62d93d71c9ef896a0b9bf6e6828d8d2ea32611f58684fba60bba35", size = 218922, upload-time = "2025-08-10T21:27:18.22Z" }, + { url = "https://files.pythonhosted.org/packages/f1/66/c06f4a93c65b6fc6578ef4f1fe51f83d61fc6f2a74ec0ce434ed288d834a/coverage-7.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da749daa7e141985487e1ff90a68315b0845930ed53dc397f4ae8f8bab25b551", size = 215951, upload-time = "2025-08-10T21:27:19.815Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ea/cc18c70a6f72f8e4def212eaebd8388c64f29608da10b3c38c8ec76f5e49/coverage-7.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3126fb6a47d287f461d9b1aa5d1a8c97034d1dffb4f452f2cf211289dae74ef", size = 216335, upload-time = "2025-08-10T21:27:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fb/9c6d1d67c6d54b149f06b9f374bc9ca03e4d7d7784c8cfd12ceda20e3787/coverage-7.10.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3da794db13cc27ca40e1ec8127945b97fab78ba548040047d54e7bfa6d442dca", size = 242772, upload-time = "2025-08-10T21:27:23.884Z" }, + { url = "https://files.pythonhosted.org/packages/5a/e5/4223bdb28b992a19a13ab1410c761e2bfe92ca1e7bba8e85ee2024eeda85/coverage-7.10.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4e27bebbd184ef8d1c1e092b74a2b7109dcbe2618dce6e96b1776d53b14b3fe8", size = 244596, upload-time = "2025-08-10T21:27:25.842Z" }, + { url = "https://files.pythonhosted.org/packages/d2/13/d646ba28613669d487c654a760571c10128247d12d9f50e93f69542679a2/coverage-7.10.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8fd4ee2580b9fefbd301b4f8f85b62ac90d1e848bea54f89a5748cf132782118", size = 246370, upload-time = "2025-08-10T21:27:27.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/7c/aff99c67d8c383142b0877ee435caf493765356336211c4899257325d6c7/coverage-7.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6999920bdd73259ce11cabfc1307484f071ecc6abdb2ca58d98facbcefc70f16", size = 244254, upload-time = "2025-08-10T21:27:29.357Z" }, + { url = "https://files.pythonhosted.org/packages/b0/13/a51ea145ed51ddfa8717bb29926d9111aca343fab38f04692a843d50be6b/coverage-7.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3623f929db885fab100cb88220a5b193321ed37e03af719efdbaf5d10b6e227", size = 242325, upload-time = "2025-08-10T21:27:30.931Z" }, + { url = "https://files.pythonhosted.org/packages/d8/4b/6119be0089c89ad49d2e5a508d55a1485c878642b706a7f95b26e299137d/coverage-7.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:25b902c5e15dea056485d782e420bb84621cc08ee75d5131ecb3dbef8bd1365f", size = 243281, upload-time = "2025-08-10T21:27:32.815Z" }, + { url = "https://files.pythonhosted.org/packages/34/c8/1b2e7e53eee4bc1304e56e10361b08197a77a26ceb07201dcc9e759ef132/coverage-7.10.3-cp39-cp39-win32.whl", hash = "sha256:f930a4d92b004b643183451fe9c8fe398ccf866ed37d172ebaccfd443a097f61", size = 218489, upload-time = "2025-08-10T21:27:34.905Z" }, + { url = "https://files.pythonhosted.org/packages/dd/1e/9c0c230a199809c39e2dff0f1f889dfb04dcd07d83c1c26a8ef671660e08/coverage-7.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:08e638a93c8acba13c7842953f92a33d52d73e410329acd472280d2a21a6c0e1", size = 219396, upload-time = "2025-08-10T21:27:36.61Z" }, + { url = "https://files.pythonhosted.org/packages/84/19/e67f4ae24e232c7f713337f3f4f7c9c58afd0c02866fb07c7b9255a19ed7/coverage-7.10.3-py3-none-any.whl", hash = "sha256:416a8d74dc0adfd33944ba2f405897bab87b7e9e84a391e09d241956bd953ce1", size = 207921, upload-time = "2025-08-10T21:27:38.254Z" }, ] [package.optional-dependencies] @@ -603,7 +705,7 @@ dependencies = [ { name = "gitpython" }, { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pandas" }, { name = "requests" }, { name = "ruptures" }, @@ -621,12 +723,12 @@ dev = [ { name = "ipykernel" }, { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython", version = "9.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "matplotlib", version = "3.9.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "matplotlib", version = "3.10.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "matplotlib", version = "3.10.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "mypy" }, { name = "pandas-stubs", version = "2.2.2.240807", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "pandas-stubs", version = "2.2.3.250527", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pandas-stubs", version = "2.3.0.250703", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = 
"pre-commit" }, { name = "pyarrow" }, { name = "pytest" }, @@ -676,31 +778,31 @@ dev = [ [[package]] name = "debugpy" -version = "1.8.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/75/087fe07d40f490a78782ff3b0a30e3968936854105487decdb33446d4b0e/debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322", size = 1641444, upload-time = "2025-04-10T19:46:10.981Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/df/156df75a41aaebd97cee9d3870fe68f8001b6c1c4ca023e221cfce69bece/debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339", size = 2076510, upload-time = "2025-04-10T19:46:13.315Z" }, - { url = "https://files.pythonhosted.org/packages/69/cd/4fc391607bca0996db5f3658762106e3d2427beaef9bfd363fd370a3c054/debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79", size = 3559614, upload-time = "2025-04-10T19:46:14.647Z" }, - { url = "https://files.pythonhosted.org/packages/1a/42/4e6d2b9d63e002db79edfd0cb5656f1c403958915e0e73ab3e9220012eec/debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987", size = 5208588, upload-time = "2025-04-10T19:46:16.233Z" }, - { url = "https://files.pythonhosted.org/packages/97/b1/cc9e4e5faadc9d00df1a64a3c2d5c5f4b9df28196c39ada06361c5141f89/debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84", size = 5241043, upload-time = "2025-04-10T19:46:17.768Z" }, - { url = "https://files.pythonhosted.org/packages/67/e8/57fe0c86915671fd6a3d2d8746e40485fd55e8d9e682388fbb3a3d42b86f/debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = 
"sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9", size = 2175064, upload-time = "2025-04-10T19:46:19.486Z" }, - { url = "https://files.pythonhosted.org/packages/3b/97/2b2fd1b1c9569c6764ccdb650a6f752e4ac31be465049563c9eb127a8487/debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2", size = 3132359, upload-time = "2025-04-10T19:46:21.192Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ee/b825c87ed06256ee2a7ed8bab8fb3bb5851293bf9465409fdffc6261c426/debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2", size = 5133269, upload-time = "2025-04-10T19:46:23.047Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a6/6c70cd15afa43d37839d60f324213843174c1d1e6bb616bd89f7c1341bac/debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01", size = 5158156, upload-time = "2025-04-10T19:46:24.521Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2a/ac2df0eda4898f29c46eb6713a5148e6f8b2b389c8ec9e425a4a1d67bf07/debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84", size = 2501268, upload-time = "2025-04-10T19:46:26.044Z" }, - { url = "https://files.pythonhosted.org/packages/10/53/0a0cb5d79dd9f7039169f8bf94a144ad3efa52cc519940b3b7dde23bcb89/debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826", size = 4221077, upload-time = "2025-04-10T19:46:27.464Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d5/84e01821f362327bf4828728aa31e907a2eca7c78cd7c6ec062780d249f8/debugpy-1.8.14-cp312-cp312-win32.whl", hash = 
"sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f", size = 5255127, upload-time = "2025-04-10T19:46:29.467Z" }, - { url = "https://files.pythonhosted.org/packages/33/16/1ed929d812c758295cac7f9cf3dab5c73439c83d9091f2d91871e648093e/debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f", size = 5297249, upload-time = "2025-04-10T19:46:31.538Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/395c792b243f2367d84202dc33689aa3d910fb9826a7491ba20fc9e261f5/debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f", size = 2485676, upload-time = "2025-04-10T19:46:32.96Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f1/6f2ee3f991327ad9e4c2f8b82611a467052a0fb0e247390192580e89f7ff/debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15", size = 4217514, upload-time = "2025-04-10T19:46:34.336Z" }, - { url = "https://files.pythonhosted.org/packages/79/28/b9d146f8f2dc535c236ee09ad3e5ac899adb39d7a19b49f03ac95d216beb/debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e", size = 5254756, upload-time = "2025-04-10T19:46:36.199Z" }, - { url = "https://files.pythonhosted.org/packages/e0/62/a7b4a57013eac4ccaef6977966e6bec5c63906dd25a86e35f155952e29a1/debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e", size = 5297119, upload-time = "2025-04-10T19:46:38.141Z" }, - { url = "https://files.pythonhosted.org/packages/85/6f/96ba96545f55b6a675afa08c96b42810de9b18c7ad17446bbec82762127a/debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f", size = 2077696, 
upload-time = "2025-04-10T19:46:46.817Z" }, - { url = "https://files.pythonhosted.org/packages/fa/84/f378a2dd837d94de3c85bca14f1db79f8fcad7e20b108b40d59da56a6d22/debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea", size = 3554846, upload-time = "2025-04-10T19:46:48.72Z" }, - { url = "https://files.pythonhosted.org/packages/db/52/88824fe5d6893f59933f664c6e12783749ab537a2101baf5c713164d8aa2/debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d", size = 5209350, upload-time = "2025-04-10T19:46:50.284Z" }, - { url = "https://files.pythonhosted.org/packages/41/35/72e9399be24a04cb72cfe1284572c9fcd1d742c7fa23786925c18fa54ad8/debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123", size = 5241852, upload-time = "2025-04-10T19:46:52.022Z" }, - { url = "https://files.pythonhosted.org/packages/97/1a/481f33c37ee3ac8040d3d51fc4c4e4e7e61cb08b8bc8971d6032acc2279f/debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20", size = 5256230, upload-time = "2025-04-10T19:46:54.077Z" }, +version = "1.8.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/d4/722d0bcc7986172ac2ef3c979ad56a1030e3afd44ced136d45f8142b1f4a/debugpy-1.8.16.tar.gz", hash = "sha256:31e69a1feb1cf6b51efbed3f6c9b0ef03bc46ff050679c4be7ea6d2e23540870", size = 1643809, upload-time = "2025-08-06T18:00:02.647Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/fd/f1b75ebc61d90882595b81d808efd3573c082e1c3407850d9dccac4ae904/debugpy-1.8.16-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:2a3958fb9c2f40ed8ea48a0d34895b461de57a1f9862e7478716c35d76f56c65", size = 2085511, upload-time = "2025-08-06T18:00:05.067Z" }, + 
{ url = "https://files.pythonhosted.org/packages/df/5e/c5c1934352871128b30a1a144a58b5baa546e1b57bd47dbed788bad4431c/debugpy-1.8.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5ca7314042e8a614cc2574cd71f6ccd7e13a9708ce3c6d8436959eae56f2378", size = 3562094, upload-time = "2025-08-06T18:00:06.66Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d5/2ebe42377e5a78dc786afc25e61ee83c5628d63f32dfa41092597d52fe83/debugpy-1.8.16-cp310-cp310-win32.whl", hash = "sha256:8624a6111dc312ed8c363347a0b59c5acc6210d897e41a7c069de3c53235c9a6", size = 5234277, upload-time = "2025-08-06T18:00:08.429Z" }, + { url = "https://files.pythonhosted.org/packages/54/f8/e774ad16a60b9913213dbabb7472074c5a7b0d84f07c1f383040a9690057/debugpy-1.8.16-cp310-cp310-win_amd64.whl", hash = "sha256:fee6db83ea5c978baf042440cfe29695e1a5d48a30147abf4c3be87513609817", size = 5266011, upload-time = "2025-08-06T18:00:10.162Z" }, + { url = "https://files.pythonhosted.org/packages/63/d6/ad70ba8b49b23fa286fb21081cf732232cc19374af362051da9c7537ae52/debugpy-1.8.16-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:67371b28b79a6a12bcc027d94a06158f2fde223e35b5c4e0783b6f9d3b39274a", size = 2184063, upload-time = "2025-08-06T18:00:11.885Z" }, + { url = "https://files.pythonhosted.org/packages/aa/49/7b03e88dea9759a4c7910143f87f92beb494daaae25560184ff4ae883f9e/debugpy-1.8.16-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2abae6dd02523bec2dee16bd6b0781cccb53fd4995e5c71cc659b5f45581898", size = 3134837, upload-time = "2025-08-06T18:00:13.782Z" }, + { url = "https://files.pythonhosted.org/packages/5d/52/b348930316921de7565fbe37a487d15409041713004f3d74d03eb077dbd4/debugpy-1.8.16-cp311-cp311-win32.whl", hash = "sha256:f8340a3ac2ed4f5da59e064aa92e39edd52729a88fbde7bbaa54e08249a04493", size = 5159142, upload-time = "2025-08-06T18:00:15.391Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/ef/9aa9549ce1e10cea696d980292e71672a91ee4a6a691ce5f8629e8f48c49/debugpy-1.8.16-cp311-cp311-win_amd64.whl", hash = "sha256:70f5fcd6d4d0c150a878d2aa37391c52de788c3dc680b97bdb5e529cb80df87a", size = 5183117, upload-time = "2025-08-06T18:00:17.251Z" }, + { url = "https://files.pythonhosted.org/packages/61/fb/0387c0e108d842c902801bc65ccc53e5b91d8c169702a9bbf4f7efcedf0c/debugpy-1.8.16-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:b202e2843e32e80b3b584bcebfe0e65e0392920dc70df11b2bfe1afcb7a085e4", size = 2511822, upload-time = "2025-08-06T18:00:18.526Z" }, + { url = "https://files.pythonhosted.org/packages/37/44/19e02745cae22bf96440141f94e15a69a1afaa3a64ddfc38004668fcdebf/debugpy-1.8.16-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64473c4a306ba11a99fe0bb14622ba4fbd943eb004847d9b69b107bde45aa9ea", size = 4230135, upload-time = "2025-08-06T18:00:19.997Z" }, + { url = "https://files.pythonhosted.org/packages/f3/0b/19b1ba5ee4412f303475a2c7ad5858efb99c90eae5ec627aa6275c439957/debugpy-1.8.16-cp312-cp312-win32.whl", hash = "sha256:833a61ed446426e38b0dd8be3e9d45ae285d424f5bf6cd5b2b559c8f12305508", size = 5281271, upload-time = "2025-08-06T18:00:21.281Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e0/bc62e2dc141de53bd03e2c7cb9d7011de2e65e8bdcdaa26703e4d28656ba/debugpy-1.8.16-cp312-cp312-win_amd64.whl", hash = "sha256:75f204684581e9ef3dc2f67687c3c8c183fde2d6675ab131d94084baf8084121", size = 5323149, upload-time = "2025-08-06T18:00:23.033Z" }, + { url = "https://files.pythonhosted.org/packages/62/66/607ab45cc79e60624df386e233ab64a6d8d39ea02e7f80e19c1d451345bb/debugpy-1.8.16-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:85df3adb1de5258dca910ae0bb185e48c98801ec15018a263a92bb06be1c8787", size = 2496157, upload-time = "2025-08-06T18:00:24.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/a0/c95baae08a75bceabb79868d663a0736655e427ab9c81fb848da29edaeac/debugpy-1.8.16-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee89e948bc236a5c43c4214ac62d28b29388453f5fd328d739035e205365f0b", size = 4222491, upload-time = "2025-08-06T18:00:25.806Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2f/1c8db6ddd8a257c3cd2c46413b267f1d5fa3df910401c899513ce30392d6/debugpy-1.8.16-cp313-cp313-win32.whl", hash = "sha256:cf358066650439847ec5ff3dae1da98b5461ea5da0173d93d5e10f477c94609a", size = 5281126, upload-time = "2025-08-06T18:00:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ba/c3e154ab307366d6c5a9c1b68de04914e2ce7fa2f50d578311d8cc5074b2/debugpy-1.8.16-cp313-cp313-win_amd64.whl", hash = "sha256:b5aea1083f6f50023e8509399d7dc6535a351cc9f2e8827d1e093175e4d9fa4c", size = 5323094, upload-time = "2025-08-06T18:00:29.03Z" }, + { url = "https://files.pythonhosted.org/packages/35/40/acdad5944e508d5e936979ad3e96e56b78ba6d7fa75aaffc4426cb921e12/debugpy-1.8.16-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:135ccd2b1161bade72a7a099c9208811c137a150839e970aeaf121c2467debe8", size = 2086696, upload-time = "2025-08-06T18:00:36.469Z" }, + { url = "https://files.pythonhosted.org/packages/2d/eb/8d6a2cf3b29e272b5dfebe6f384f8457977d4fd7a02dab2cae4d421dbae2/debugpy-1.8.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:211238306331a9089e253fd997213bc4a4c65f949271057d6695953254095376", size = 3557329, upload-time = "2025-08-06T18:00:38.189Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/63b9cc4d3c6980c702911c0f6a9748933ce4e4f16ae0ec4fdef7690f6662/debugpy-1.8.16-cp39-cp39-win32.whl", hash = "sha256:88eb9ffdfb59bf63835d146c183d6dba1f722b3ae2a5f4b9fc03e925b3358922", size = 5235114, upload-time = "2025-08-06T18:00:39.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/cf/80947f57e0ef4d6e33ec9c3a109a542678eba465723bf8b599719238eb93/debugpy-1.8.16-cp39-cp39-win_amd64.whl", hash = "sha256:c2c47c2e52b40449552843b913786499efcc3dbc21d6c49287d939cd0dbc49fd", size = 5266799, upload-time = "2025-08-06T18:00:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ecc9ae29fa5b2d90107cd1d9bf8ed19aacb74b2264d986ae9d44fe9bdf87/debugpy-1.8.16-py2.py3-none-any.whl", hash = "sha256:19c9521962475b87da6f673514f7fd610328757ec993bf7ec0d8c96f9a325f9e", size = 5287700, upload-time = "2025-08-06T18:00:42.333Z" }, ] [[package]] @@ -714,7 +816,7 @@ wheels = [ [[package]] name = "deptry" -version = "0.23.0" +version = "0.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -724,32 +826,32 @@ dependencies = [ { name = "requirements-parser" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/7e/75a1990a7244a3d3c5364353ac76f1173aa568a67793199d09f995b66c29/deptry-0.23.0.tar.gz", hash = "sha256:4915a3590ccf38ad7a9176aee376745aa9de121f50f8da8fb9ccec87fa93e676", size = 200920, upload-time = "2025-01-25T17:01:48.052Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/31/3e2f4a9b43bd807b28a49d673b9b5f8dcc7265d43950b24e875ba90e6205/deptry-0.23.1.tar.gz", hash = "sha256:5d23e0ef25f3c56405c05383a476edda55944563c5c47a3e9249ed3ec860d382", size = 460016, upload-time = "2025-07-31T05:54:49.681Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/85/a8b77c8a87e7c9e81ce8437d752879b5281fd8a0b8a114c6d393f980aa72/deptry-0.23.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1f2a6817a37d76e8f6b667381b7caf6ea3e6d6c18b5be24d36c625f387c79852", size = 1756706, upload-time = "2025-01-25T17:01:45.511Z" }, - { url = 
"https://files.pythonhosted.org/packages/53/bf/26c58af1467df6e889c6b969c27dad2c67b8bd625320d9db7d70277a222f/deptry-0.23.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:9601b64cc0aed42687fdd5c912d5f1e90d7f7333fb589b14e35bfdfebae866f3", size = 1657001, upload-time = "2025-01-25T17:01:40.913Z" }, - { url = "https://files.pythonhosted.org/packages/ae/7d/b0bd6a50ec3f87b0a5ed3bff64ac2bd5bd8d3205e570bc5bc3170f26a01f/deptry-0.23.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6172b2205f6e84bcc9df25226693d4deb9576a6f746c2ace828f6d13401d357", size = 1754607, upload-time = "2025-01-25T17:01:23.211Z" }, - { url = "https://files.pythonhosted.org/packages/e6/1b/79b1213bb9b58b0bcc200867cd6d64cd76ec4b9c5cdb76f95c3e6ee7b92e/deptry-0.23.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cfa4b3a46ee8a026eaa38e4b9ba43fe6036a07fe16bf0a663cb611b939f6af8", size = 1831961, upload-time = "2025-01-25T17:01:32.702Z" }, - { url = "https://files.pythonhosted.org/packages/09/d6/607004f20637987d437f420f3dad4d6f1a87a4a83380ab60220397ee8fbe/deptry-0.23.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9d03cc99a61c348df92074a50e0a71b28f264f0edbf686084ca90e6fd44e3abe", size = 1932126, upload-time = "2025-01-25T17:01:28.315Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ff/6fff20bf2632727af55dc3a24a6f5634dcdf34fd785402a55207ba49d9cc/deptry-0.23.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9a46f78098f145100dc582a59af8548b26cdfa16cf0fbd85d2d44645e724cb6a", size = 2004755, upload-time = "2025-01-25T17:01:36.842Z" }, - { url = "https://files.pythonhosted.org/packages/41/30/1b6217bdccf2144d4c3e78f89b2a84db82478b2449599c2d3b4b21a89043/deptry-0.23.0-cp39-abi3-win_amd64.whl", hash = "sha256:d53e803b280791d89a051b6183d9dc40411200e22a8ab7e6c32c6b169822a664", size = 1606944, upload-time = "2025-01-25T17:01:54.326Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/ab/47398041d11b19aa9db28f28cf076dbe42aba3e16d67d3e7911330e3a304/deptry-0.23.0-cp39-abi3-win_arm64.whl", hash = "sha256:da7678624f4626d839c8c03675452cefc59d6cf57d25c84a9711dae514719279", size = 1518394, upload-time = "2025-01-25T17:01:49.099Z" }, - { url = "https://files.pythonhosted.org/packages/42/d7/23cc3de23b23e90cca281105f58c518a11c9a743b425b4a0b0670d0d784c/deptry-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:40706dcbed54141f2d23afa70a272171c8c46531cd6f0f9c8ef482c906b3cee2", size = 1755546, upload-time = "2025-01-25T17:01:46.835Z" }, - { url = "https://files.pythonhosted.org/packages/e6/13/bcc3f728bafe0d2465586b5d7e519c56ff093bb8728ad2828fdf07ac1274/deptry-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:889541844092f18e7b48631852195f36c25c5afd4d7e074b19ba824b430add50", size = 1656307, upload-time = "2025-01-25T17:01:42.516Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1a/d1db8bc3dc4f89172cd0e8285f081c4a43d7489a7bad83572eec823840b6/deptry-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aff9156228eb16cd81792f920c1623c00cb59091ae572600ba0eac587da33c0c", size = 1753353, upload-time = "2025-01-25T17:01:26.189Z" }, - { url = "https://files.pythonhosted.org/packages/eb/44/3346da11053c92dc6b4bec1b737ebe282e926cf32183ed3662c15bbca431/deptry-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:583154732cfd438a4a090b7d13d8b2016f1ac2732534f34fb689345768d8538b", size = 1831330, upload-time = "2025-01-25T17:01:34.418Z" }, - { url = "https://files.pythonhosted.org/packages/85/f0/dcf9c31a7d19a54e80914c741319e2fa04e7a9ffd7fb96ee4e17d52bcb3d/deptry-0.23.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:736e7bc557aec6118b2a4d454f0d81f070782faeaa9d8d3c9a15985c9f265372", size = 1931459, upload-time = "2025-01-25T17:01:30.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/18/95b9776439eac92c98095adb3cbda15588b22b229f9936df30bb10e573ad/deptry-0.23.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5f7e4b1a5232ed6d352fca7173750610a169377d1951d3e9782947191942a765", size = 2004198, upload-time = "2025-01-25T17:01:38.926Z" }, - { url = "https://files.pythonhosted.org/packages/b2/a9/ea41967d3df7665bab84f1e1e56f7f3a4131ed0a861413a2433bbd9a3c0e/deptry-0.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:04afae204654542406318fd3dd6f4a6697579597f37195437daf84a53ee0ebbf", size = 1607152, upload-time = "2025-01-25T17:01:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d0/9785c0e7fdab12f5324467d70ba65ad03b9d4071a13fc182b6d98bab6208/deptry-0.23.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f0b231d098fb5b48d8973c9f192c353ffdd395770063424969fa7f15ddfea7d8", size = 1768731, upload-time = "2025-07-31T05:54:47.348Z" }, + { url = "https://files.pythonhosted.org/packages/c5/4b/46aded35e0de153936b2214e49e5935179eed9f23cbd3a9a0cd9a5ab0abd/deptry-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:bf057f514bb2fa18a2b192a7f7372bd14577ff46b11486933e8383dfef461983", size = 1667240, upload-time = "2025-07-31T05:54:43.956Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f7/206330f68280a1af7edb8bea87f383dbaa4e3b02b37199d40f86e4c43048/deptry-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ee3f5663bb1c048e2aaf25a4d9e6d09cc1f3b3396ee248980878c6a6c9c0e21", size = 1772019, upload-time = "2025-07-31T05:54:31.165Z" }, + { url = "https://files.pythonhosted.org/packages/c5/80/51a9e94349b47013e2fd78fd221b12202a7866cd2e0882cfd87d63055e88/deptry-0.23.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae0366dc5f50a5fb29cf90de1110c5e368513de6c1b2dac439f2817f3f752616", size = 1855973, upload-time = "2025-07-31T05:54:37.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/7a/bff10ddd26ce39c56a9a35bdc98fcf44c2befe5954c8da4bb895e3f750bb/deptry-0.23.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ab156a90a9eda5819aeb1c1da585dd4d5ec509029399a38771a49e78f40db90f", size = 1946957, upload-time = "2025-07-31T05:54:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/c80b190cbd817d1f75f8d02d4b6f4d430b2f3014a09d3895684e291e473b/deptry-0.23.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:651c7eb168233755152fcc468713c024d64a03069645187edb4a17ba61ce6133", size = 2025282, upload-time = "2025-07-31T05:54:40.906Z" }, + { url = "https://files.pythonhosted.org/packages/3c/58/1dfb7a6c4ec2daf123264d2c30f53f45791fee46cd0244be5bf97597d2aa/deptry-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:8da1e8f70e7086ebc228f3a4a3cfb5aa127b09b5eef60d694503d6bb79809025", size = 1631377, upload-time = "2025-07-31T05:54:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/18/d3/667b974cf42fc50245a8028beb9966643ee214ca567cc6df6e876feca5ed/deptry-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:f589497a5809717db4dcf2aa840f2847c0a4c489331608e538850b6a9ab1c30b", size = 1551113, upload-time = "2025-07-31T05:54:50.679Z" }, + { url = "https://files.pythonhosted.org/packages/1a/9f/94f582b1134ce7b5bb1ddacc6d421294064c74451744cebdf7acd009c545/deptry-0.23.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6af91d86380ef703adb6ae65f273d88e3cca7fd315c4c309da857a0cfa728244", size = 1768259, upload-time = "2025-07-31T05:54:48.529Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e7/07f43e87e3eef96d1ddd52610de2b125f8a562f7585be027f2f1d4fff03d/deptry-0.23.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:42a249d317c3128c286035a1f7aaa41a0c3c967f17848817c2e07ca50d5ed450", size = 1667238, upload-time = "2025-07-31T05:54:45.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/4f/4ae155a301fdeddfde724540495bb359774065947cbd4ce31b62cf23719e/deptry-0.23.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d988c7c75201997970bae1e8d564b4c7a14d350556c4f7c269fd33f3b081c314", size = 1771347, upload-time = "2025-07-31T05:54:33.254Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7d/3e2ee4de068a72a960dddb985d9a0198d3c7db261d7cd6ea8bb967161068/deptry-0.23.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae13d8e65ae88b77632c45edb4038301a6f9efcac06715abfde9a029e5879698", size = 1855446, upload-time = "2025-07-31T05:54:39.418Z" }, + { url = "https://files.pythonhosted.org/packages/14/35/018016d88c6602755a75cbb9013c5822f4bacf329a501b638365351f3e44/deptry-0.23.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:40058a7a3fe9dacb745668897ee992e58daf5aac406b668ff2eaaf0f6f586550", size = 1946581, upload-time = "2025-07-31T05:54:36.206Z" }, + { url = "https://files.pythonhosted.org/packages/90/d0/ee75b72ffdebe73ef6a8e1d3960cbdabecd39358516592c1b17ea65f1e98/deptry-0.23.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d111cf4261eeadbdb20051d8d542f04deb3cfced0cb280ece8d654f7f6055921", size = 2024895, upload-time = "2025-07-31T05:54:42.178Z" }, + { url = "https://files.pythonhosted.org/packages/32/72/ac643d909da2e50b1fb78143591079f21649f60572d8224be4ba4d795c2c/deptry-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9f9bbb92f95ada9ccfa5ecefee05ba3c39cfa0734b5483a3a1a3c4eeb9c99054", size = 1631828, upload-time = "2025-07-31T05:54:53.486Z" }, ] [[package]] name = "distlib" -version = "0.3.9" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, 
upload-time = "2024-10-09T18:35:47.551Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] [[package]] @@ -789,60 +891,76 @@ wheels = [ [[package]] name = "filelock" -version = "3.18.0" +version = "3.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, ] [[package]] name = "fonttools" -version = "4.58.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/5a/1124b2c8cb3a8015faf552e92714040bcdbc145dfa29928891b02d147a18/fonttools-4.58.4.tar.gz", hash = "sha256:928a8009b9884ed3aae17724b960987575155ca23c6f0b8146e400cc9e0d44ba", size = 3525026, upload-time = "2025-06-13T17:25:15.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/86/d22c24caa574449b56e994ed1a96d23b23af85557fb62a92df96439d3f6c/fonttools-4.58.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:834542f13fee7625ad753b2db035edb674b07522fcbdd0ed9e9a9e2a1034467f", size = 2748349, upload-time = "2025-06-13T17:23:49.179Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b8/384aca93856def00e7de30341f1e27f439694857d82c35d74a809c705ed0/fonttools-4.58.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2e6c61ce330142525296170cd65666e46121fc0d44383cbbcfa39cf8f58383df", size = 2318565, upload-time = "2025-06-13T17:23:52.144Z" }, - { url = "https://files.pythonhosted.org/packages/1a/f2/273edfdc8d9db89ecfbbf659bd894f7e07b6d53448b19837a4bdba148d17/fonttools-4.58.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9c75f8faa29579c0fbf29b56ae6a3660c6c025f3b671803cb6a9caa7e4e3a98", size = 4838855, upload-time = "2025-06-13T17:23:54.039Z" }, - { url = "https://files.pythonhosted.org/packages/13/fa/403703548c093c30b52ab37e109b369558afa221130e67f06bef7513f28a/fonttools-4.58.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:88dedcedbd5549e35b2ea3db3de02579c27e62e51af56779c021e7b33caadd0e", size = 4767637, upload-time = 
"2025-06-13T17:23:56.17Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a8/3380e1e0bff6defb0f81c9abf274a5b4a0f30bc8cab4fd4e346c6f923b4c/fonttools-4.58.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae80a895adab43586f4da1521d58fd4f4377cef322ee0cc205abcefa3a5effc3", size = 4819397, upload-time = "2025-06-13T17:23:58.263Z" }, - { url = "https://files.pythonhosted.org/packages/cd/1b/99e47eb17a8ca51d808622a4658584fa8f340857438a4e9d7ac326d4a041/fonttools-4.58.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0d3acc7f0d151da116e87a182aefb569cf0a3c8e0fd4c9cd0a7c1e7d3e7adb26", size = 4926641, upload-time = "2025-06-13T17:24:00.368Z" }, - { url = "https://files.pythonhosted.org/packages/31/75/415254408f038e35b36c8525fc31feb8561f98445688dd2267c23eafd7a2/fonttools-4.58.4-cp310-cp310-win32.whl", hash = "sha256:1244f69686008e7e8d2581d9f37eef330a73fee3843f1107993eb82c9d306577", size = 2201917, upload-time = "2025-06-13T17:24:02.587Z" }, - { url = "https://files.pythonhosted.org/packages/c5/69/f019a15ed2946317c5318e1bcc8876f8a54a313848604ad1d4cfc4c07916/fonttools-4.58.4-cp310-cp310-win_amd64.whl", hash = "sha256:2a66c0af8a01eb2b78645af60f3b787de5fe5eb1fd8348163715b80bdbfbde1f", size = 2246327, upload-time = "2025-06-13T17:24:04.087Z" }, - { url = "https://files.pythonhosted.org/packages/17/7b/cc6e9bb41bab223bd2dc70ba0b21386b85f604e27f4c3206b4205085a2ab/fonttools-4.58.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3841991c9ee2dc0562eb7f23d333d34ce81e8e27c903846f0487da21e0028eb", size = 2768901, upload-time = "2025-06-13T17:24:05.901Z" }, - { url = "https://files.pythonhosted.org/packages/3d/15/98d75df9f2b4e7605f3260359ad6e18e027c11fa549f74fce567270ac891/fonttools-4.58.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c98f91b6a9604e7ffb5ece6ea346fa617f967c2c0944228801246ed56084664", size = 2328696, upload-time = "2025-06-13T17:24:09.18Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/c8/dc92b80f5452c9c40164e01b3f78f04b835a00e673bd9355ca257008ff61/fonttools-4.58.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab9f891eb687ddf6a4e5f82901e00f992e18012ca97ab7acd15f13632acd14c1", size = 5018830, upload-time = "2025-06-13T17:24:11.282Z" }, - { url = "https://files.pythonhosted.org/packages/19/48/8322cf177680505d6b0b6062e204f01860cb573466a88077a9b795cb70e8/fonttools-4.58.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:891c5771e8f0094b7c0dc90eda8fc75e72930b32581418f2c285a9feedfd9a68", size = 4960922, upload-time = "2025-06-13T17:24:14.9Z" }, - { url = "https://files.pythonhosted.org/packages/14/e0/2aff149ed7eb0916de36da513d473c6fff574a7146891ce42de914899395/fonttools-4.58.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:43ba4d9646045c375d22e3473b7d82b18b31ee2ac715cd94220ffab7bc2d5c1d", size = 4997135, upload-time = "2025-06-13T17:24:16.959Z" }, - { url = "https://files.pythonhosted.org/packages/e6/6f/4d9829b29a64a2e63a121cb11ecb1b6a9524086eef3e35470949837a1692/fonttools-4.58.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33d19f16e6d2ffd6669bda574a6589941f6c99a8d5cfb9f464038244c71555de", size = 5108701, upload-time = "2025-06-13T17:24:18.849Z" }, - { url = "https://files.pythonhosted.org/packages/6f/1e/2d656ddd1b0cd0d222f44b2d008052c2689e66b702b9af1cd8903ddce319/fonttools-4.58.4-cp311-cp311-win32.whl", hash = "sha256:b59e5109b907da19dc9df1287454821a34a75f2632a491dd406e46ff432c2a24", size = 2200177, upload-time = "2025-06-13T17:24:20.823Z" }, - { url = "https://files.pythonhosted.org/packages/fb/83/ba71ad053fddf4157cb0697c8da8eff6718d059f2a22986fa5f312b49c92/fonttools-4.58.4-cp311-cp311-win_amd64.whl", hash = "sha256:3d471a5b567a0d1648f2e148c9a8bcf00d9ac76eb89e976d9976582044cc2509", size = 2247892, upload-time = "2025-06-13T17:24:22.927Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/3c/1d1792bfe91ef46f22a3d23b4deb514c325e73c17d4f196b385b5e2faf1c/fonttools-4.58.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:462211c0f37a278494e74267a994f6be9a2023d0557aaa9ecbcbfce0f403b5a6", size = 2754082, upload-time = "2025-06-13T17:24:24.862Z" }, - { url = "https://files.pythonhosted.org/packages/2a/1f/2b261689c901a1c3bc57a6690b0b9fc21a9a93a8b0c83aae911d3149f34e/fonttools-4.58.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0c7a12fb6f769165547f00fcaa8d0df9517603ae7e04b625e5acb8639809b82d", size = 2321677, upload-time = "2025-06-13T17:24:26.815Z" }, - { url = "https://files.pythonhosted.org/packages/fe/6b/4607add1755a1e6581ae1fc0c9a640648e0d9cdd6591cc2d581c2e07b8c3/fonttools-4.58.4-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d42c63020a922154add0a326388a60a55504629edc3274bc273cd3806b4659f", size = 4896354, upload-time = "2025-06-13T17:24:28.428Z" }, - { url = "https://files.pythonhosted.org/packages/cd/95/34b4f483643d0cb11a1f830b72c03fdd18dbd3792d77a2eb2e130a96fada/fonttools-4.58.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f2b4e6fd45edc6805f5f2c355590b092ffc7e10a945bd6a569fc66c1d2ae7aa", size = 4941633, upload-time = "2025-06-13T17:24:30.568Z" }, - { url = "https://files.pythonhosted.org/packages/81/ac/9bafbdb7694059c960de523e643fa5a61dd2f698f3f72c0ca18ae99257c7/fonttools-4.58.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f155b927f6efb1213a79334e4cb9904d1e18973376ffc17a0d7cd43d31981f1e", size = 4886170, upload-time = "2025-06-13T17:24:32.724Z" }, - { url = "https://files.pythonhosted.org/packages/ae/44/a3a3b70d5709405f7525bb7cb497b4e46151e0c02e3c8a0e40e5e9fe030b/fonttools-4.58.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e38f687d5de97c7fb7da3e58169fb5ba349e464e141f83c3c2e2beb91d317816", size = 5037851, upload-time = "2025-06-13T17:24:35.034Z" }, - 
{ url = "https://files.pythonhosted.org/packages/21/cb/e8923d197c78969454eb876a4a55a07b59c9c4c46598f02b02411dc3b45c/fonttools-4.58.4-cp312-cp312-win32.whl", hash = "sha256:636c073b4da9db053aa683db99580cac0f7c213a953b678f69acbca3443c12cc", size = 2187428, upload-time = "2025-06-13T17:24:36.996Z" }, - { url = "https://files.pythonhosted.org/packages/46/e6/fe50183b1a0e1018e7487ee740fa8bb127b9f5075a41e20d017201e8ab14/fonttools-4.58.4-cp312-cp312-win_amd64.whl", hash = "sha256:82e8470535743409b30913ba2822e20077acf9ea70acec40b10fcf5671dceb58", size = 2236649, upload-time = "2025-06-13T17:24:38.985Z" }, - { url = "https://files.pythonhosted.org/packages/d4/4f/c05cab5fc1a4293e6bc535c6cb272607155a0517700f5418a4165b7f9ec8/fonttools-4.58.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5f4a64846495c543796fa59b90b7a7a9dff6839bd852741ab35a71994d685c6d", size = 2745197, upload-time = "2025-06-13T17:24:40.645Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d3/49211b1f96ae49308f4f78ca7664742377a6867f00f704cdb31b57e4b432/fonttools-4.58.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e80661793a5d4d7ad132a2aa1eae2e160fbdbb50831a0edf37c7c63b2ed36574", size = 2317272, upload-time = "2025-06-13T17:24:43.428Z" }, - { url = "https://files.pythonhosted.org/packages/b2/11/c9972e46a6abd752a40a46960e431c795ad1f306775fc1f9e8c3081a1274/fonttools-4.58.4-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fe5807fc64e4ba5130f1974c045a6e8d795f3b7fb6debfa511d1773290dbb76b", size = 4877184, upload-time = "2025-06-13T17:24:45.527Z" }, - { url = "https://files.pythonhosted.org/packages/ea/24/5017c01c9ef8df572cc9eaf9f12be83ad8ed722ff6dc67991d3d752956e4/fonttools-4.58.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b610b9bef841cb8f4b50472494158b1e347d15cad56eac414c722eda695a6cfd", size = 4939445, upload-time = "2025-06-13T17:24:47.647Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/b0/538cc4d0284b5a8826b4abed93a69db52e358525d4b55c47c8cef3669767/fonttools-4.58.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2daa7f0e213c38f05f054eb5e1730bd0424aebddbeac094489ea1585807dd187", size = 4878800, upload-time = "2025-06-13T17:24:49.766Z" }, - { url = "https://files.pythonhosted.org/packages/5a/9b/a891446b7a8250e65bffceb248508587958a94db467ffd33972723ab86c9/fonttools-4.58.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:66cccb6c0b944496b7f26450e9a66e997739c513ffaac728d24930df2fd9d35b", size = 5021259, upload-time = "2025-06-13T17:24:51.754Z" }, - { url = "https://files.pythonhosted.org/packages/17/b2/c4d2872cff3ace3ddd1388bf15b76a1d8d5313f0a61f234e9aed287e674d/fonttools-4.58.4-cp313-cp313-win32.whl", hash = "sha256:94d2aebb5ca59a5107825520fde596e344652c1f18170ef01dacbe48fa60c889", size = 2185824, upload-time = "2025-06-13T17:24:54.324Z" }, - { url = "https://files.pythonhosted.org/packages/98/57/cddf8bcc911d4f47dfca1956c1e3aeeb9f7c9b8e88b2a312fe8c22714e0b/fonttools-4.58.4-cp313-cp313-win_amd64.whl", hash = "sha256:b554bd6e80bba582fd326ddab296e563c20c64dca816d5e30489760e0c41529f", size = 2236382, upload-time = "2025-06-13T17:24:56.291Z" }, - { url = "https://files.pythonhosted.org/packages/45/20/787d70ba4cb831706fa587c56ee472a88ebc28752be660f4b58e598af6fc/fonttools-4.58.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca773fe7812e4e1197ee4e63b9691e89650ab55f679e12ac86052d2fe0d152cd", size = 2754537, upload-time = "2025-06-13T17:24:57.851Z" }, - { url = "https://files.pythonhosted.org/packages/4d/a5/ccb7ef1b8ab4bbf48f7753b6df512b61e73af82cd27aa486a03d6afb8635/fonttools-4.58.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e31289101221910f44245472e02b1a2f7d671c6d06a45c07b354ecb25829ad92", size = 2321715, upload-time = "2025-06-13T17:24:59.863Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/5c/b361a7eae95950afaadb7049f55b214b619cb5368086cb3253726fe0c478/fonttools-4.58.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90c9e3c01475bb9602cb617f69f02c4ba7ab7784d93f0b0d685e84286f4c1a10", size = 4819004, upload-time = "2025-06-13T17:25:01.591Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2f/3006fbb1f57704cd60af82fb8127788cfb102f12d39c39fb5996af595cf3/fonttools-4.58.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e00a826f2bc745a010341ac102082fe5e3fb9f0861b90ed9ff32277598813711", size = 4749072, upload-time = "2025-06-13T17:25:03.334Z" }, - { url = "https://files.pythonhosted.org/packages/c2/42/ea79e2c3d5e4441e4508d6456b268a7de275452f3dba3a13fc9d73f3e03d/fonttools-4.58.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc75e72e9d2a4ad0935c59713bd38679d51c6fefab1eadde80e3ed4c2a11ea84", size = 4802023, upload-time = "2025-06-13T17:25:05.486Z" }, - { url = "https://files.pythonhosted.org/packages/d4/70/90a196f57faa2bcd1485710c6d08eedceca500cdf2166640b3478e72072c/fonttools-4.58.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f57a795e540059ce3de68508acfaaf177899b39c36ef0a2833b2308db98c71f1", size = 4911103, upload-time = "2025-06-13T17:25:07.505Z" }, - { url = "https://files.pythonhosted.org/packages/cb/3f/a7d38e606e98701dbcb6198406c8b554a77ed06c5b21e425251813fd3775/fonttools-4.58.4-cp39-cp39-win32.whl", hash = "sha256:a7d04f64c88b48ede655abcf76f2b2952f04933567884d99be7c89e0a4495131", size = 1471393, upload-time = "2025-06-13T17:25:09.587Z" }, - { url = "https://files.pythonhosted.org/packages/37/6e/08158deaebeb5b0c7a0fb251ca6827defb5f5159958a23ba427e0b677e95/fonttools-4.58.4-cp39-cp39-win_amd64.whl", hash = "sha256:5a8bc5dfd425c89b1c38380bc138787b0a830f761b82b37139aa080915503b69", size = 1515901, upload-time = "2025-06-13T17:25:11.336Z" }, - { url = 
"https://files.pythonhosted.org/packages/0b/2f/c536b5b9bb3c071e91d536a4d11f969e911dbb6b227939f4c5b0bca090df/fonttools-4.58.4-py3-none-any.whl", hash = "sha256:a10ce13a13f26cbb9f37512a4346bb437ad7e002ff6fa966a7ce7ff5ac3528bd", size = 1114660, upload-time = "2025-06-13T17:25:13.321Z" }, +version = "4.59.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/7f/29c9c3fe4246f6ad96fee52b88d0dc3a863c7563b0afc959e36d78b965dc/fonttools-4.59.1.tar.gz", hash = "sha256:74995b402ad09822a4c8002438e54940d9f1ecda898d2bb057729d7da983e4cb", size = 3534394, upload-time = "2025-08-14T16:28:14.266Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/da/d66e5678802b2b662fd62908bf88b78d00bfb62de51660f270cf0dfce333/fonttools-4.59.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e90a89e52deb56b928e761bb5b5f65f13f669bfd96ed5962975debea09776a23", size = 2758395, upload-time = "2025-08-14T16:26:10.239Z" }, + { url = "https://files.pythonhosted.org/packages/96/74/d70a42bcc9ffa40a63e81417535b2849a702bd88f38bc2ed994ae86a2e74/fonttools-4.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d29ab70658d2ec19422b25e6ace00a0b0ae4181ee31e03335eaef53907d2d83", size = 2331647, upload-time = "2025-08-14T16:26:13.399Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f6/4a13657c9ca134ac62d9a68e4b3412b95b059537eab459cc1df653f45862/fonttools-4.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f9721a564978a10d5c12927f99170d18e9a32e5a727c61eae56f956a4d118b", size = 4846293, upload-time = "2025-08-14T16:26:15.586Z" }, + { url = "https://files.pythonhosted.org/packages/69/e3/9f0c8c30eaea5b2d891bd95b000381b3b2dcaa89b5a064cce25157aba973/fonttools-4.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8c8758a7d97848fc8b514b3d9b4cb95243714b2f838dde5e1e3c007375de6214", size = 4776105, upload-time = "2025-08-14T16:26:17.624Z" }, + { 
url = "https://files.pythonhosted.org/packages/e2/73/1e6a06e2eecdc7b054b035507694b4f480e83b94dcb0d19f8a010d95350a/fonttools-4.59.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2aeb829ad9d41a2ef17cab8bb5d186049ba38a840f10352e654aa9062ec32dc1", size = 4825142, upload-time = "2025-08-14T16:26:19.936Z" }, + { url = "https://files.pythonhosted.org/packages/72/7d/a512521ec44c37bda27d08193e79e48a510a073554c30400ccc600494830/fonttools-4.59.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac216a2980a2d2b3b88c68a24f8a9bfb203e2490e991b3238502ad8f1e7bfed0", size = 4935220, upload-time = "2025-08-14T16:26:22.22Z" }, + { url = "https://files.pythonhosted.org/packages/62/f1/71f9a9c4e5df44d861975538a5c56b58f1662cd32ebbea5a02eb86028fc1/fonttools-4.59.1-cp310-cp310-win32.whl", hash = "sha256:d31dc137ed8ec71dbc446949eba9035926e6e967b90378805dcf667ff57cabb1", size = 2216883, upload-time = "2025-08-14T16:26:24.037Z" }, + { url = "https://files.pythonhosted.org/packages/f9/6d/92b2e3e0350bb3ef88024ae19513c12cee61896220e3df421c47a439af28/fonttools-4.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:5265bc52ed447187d39891b5f21d7217722735d0de9fe81326566570d12851a9", size = 2261310, upload-time = "2025-08-14T16:26:26.184Z" }, + { url = "https://files.pythonhosted.org/packages/34/62/9667599561f623d4a523cc9eb4f66f3b94b6155464110fa9aebbf90bbec7/fonttools-4.59.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4909cce2e35706f3d18c54d3dcce0414ba5e0fb436a454dffec459c61653b513", size = 2778815, upload-time = "2025-08-14T16:26:28.484Z" }, + { url = "https://files.pythonhosted.org/packages/8f/78/cc25bcb2ce86033a9df243418d175e58f1956a35047c685ef553acae67d6/fonttools-4.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbec204fa9f877641747f2d9612b2b656071390d7a7ef07a9dbf0ecf9c7195c", size = 2341631, upload-time = "2025-08-14T16:26:30.396Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/cc/fcbb606dd6871f457ac32f281c20bcd6cc77d9fce77b5a4e2b2afab1f500/fonttools-4.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39dfd42cc2dc647b2c5469bc7a5b234d9a49e72565b96dd14ae6f11c2c59ef15", size = 5022222, upload-time = "2025-08-14T16:26:32.447Z" }, + { url = "https://files.pythonhosted.org/packages/61/96/c0b1cf2b74d08eb616a80dbf5564351fe4686147291a25f7dce8ace51eb3/fonttools-4.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b11bc177a0d428b37890825d7d025040d591aa833f85f8d8878ed183354f47df", size = 4966512, upload-time = "2025-08-14T16:26:34.621Z" }, + { url = "https://files.pythonhosted.org/packages/a4/26/51ce2e3e0835ffc2562b1b11d1fb9dafd0aca89c9041b64a9e903790a761/fonttools-4.59.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b9b4c35b3be45e5bc774d3fc9608bbf4f9a8d371103b858c80edbeed31dd5aa", size = 5001645, upload-time = "2025-08-14T16:26:36.876Z" }, + { url = "https://files.pythonhosted.org/packages/36/11/ef0b23f4266349b6d5ccbd1a07b7adc998d5bce925792aa5d1ec33f593e3/fonttools-4.59.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:01158376b8a418a0bae9625c476cebfcfcb5e6761e9d243b219cd58341e7afbb", size = 5113777, upload-time = "2025-08-14T16:26:39.002Z" }, + { url = "https://files.pythonhosted.org/packages/d0/da/b398fe61ef433da0a0472cdb5d4399124f7581ffe1a31b6242c91477d802/fonttools-4.59.1-cp311-cp311-win32.whl", hash = "sha256:cf7c5089d37787387123f1cb8f1793a47c5e1e3d1e4e7bfbc1cc96e0f925eabe", size = 2215076, upload-time = "2025-08-14T16:26:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/94/bd/e2624d06ab94e41c7c77727b2941f1baed7edb647e63503953e6888020c9/fonttools-4.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:c866eef7a0ba320486ade6c32bfc12813d1a5db8567e6904fb56d3d40acc5116", size = 2262779, upload-time = "2025-08-14T16:26:43.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/fe/6e069cc4cb8881d164a9bd956e9df555bc62d3eb36f6282e43440200009c/fonttools-4.59.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:43ab814bbba5f02a93a152ee61a04182bb5809bd2bc3609f7822e12c53ae2c91", size = 2769172, upload-time = "2025-08-14T16:26:45.729Z" }, + { url = "https://files.pythonhosted.org/packages/b9/98/ec4e03f748fefa0dd72d9d95235aff6fef16601267f4a2340f0e16b9330f/fonttools-4.59.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4f04c3ffbfa0baafcbc550657cf83657034eb63304d27b05cff1653b448ccff6", size = 2337281, upload-time = "2025-08-14T16:26:47.921Z" }, + { url = "https://files.pythonhosted.org/packages/8b/b1/890360a7e3d04a30ba50b267aca2783f4c1364363797e892e78a4f036076/fonttools-4.59.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d601b153e51a5a6221f0d4ec077b6bfc6ac35bfe6c19aeaa233d8990b2b71726", size = 4909215, upload-time = "2025-08-14T16:26:49.682Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ec/2490599550d6c9c97a44c1e36ef4de52d6acf742359eaa385735e30c05c4/fonttools-4.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c735e385e30278c54f43a0d056736942023c9043f84ee1021eff9fd616d17693", size = 4951958, upload-time = "2025-08-14T16:26:51.616Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/bd053f6f7634234a9b9805ff8ae4f32df4f2168bee23cafd1271ba9915a9/fonttools-4.59.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1017413cdc8555dce7ee23720da490282ab7ec1cf022af90a241f33f9a49afc4", size = 4894738, upload-time = "2025-08-14T16:26:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a1/3cd12a010d288325a7cfcf298a84825f0f9c29b01dee1baba64edfe89257/fonttools-4.59.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5c6d8d773470a5107052874341ed3c487c16ecd179976d81afed89dea5cd7406", size = 5045983, upload-time = "2025-08-14T16:26:56.153Z" }, + 
{ url = "https://files.pythonhosted.org/packages/a2/af/8a2c3f6619cc43cf87951405337cc8460d08a4e717bb05eaa94b335d11dc/fonttools-4.59.1-cp312-cp312-win32.whl", hash = "sha256:2a2d0d33307f6ad3a2086a95dd607c202ea8852fa9fb52af9b48811154d1428a", size = 2203407, upload-time = "2025-08-14T16:26:58.165Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f2/a19b874ddbd3ebcf11d7e25188ef9ac3f68b9219c62263acb34aca8cde05/fonttools-4.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:0b9e4fa7eaf046ed6ac470f6033d52c052481ff7a6e0a92373d14f556f298dc0", size = 2251561, upload-time = "2025-08-14T16:27:00.646Z" }, + { url = "https://files.pythonhosted.org/packages/19/5e/94a4d7f36c36e82f6a81e0064d148542e0ad3e6cf51fc5461ca128f3658d/fonttools-4.59.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:89d9957b54246c6251345297dddf77a84d2c19df96af30d2de24093bbdf0528b", size = 2760192, upload-time = "2025-08-14T16:27:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ee/a5/f50712fc33ef9d06953c660cefaf8c8fe4b8bc74fa21f44ee5e4f9739439/fonttools-4.59.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8156b11c0d5405810d216f53907bd0f8b982aa5f1e7e3127ab3be1a4062154ff", size = 2332694, upload-time = "2025-08-14T16:27:04.883Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a2/5a9fc21c354bf8613215ce233ab0d933bd17d5ff4c29693636551adbc7b3/fonttools-4.59.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8387876a8011caec52d327d5e5bca705d9399ec4b17afb8b431ec50d47c17d23", size = 4889254, upload-time = "2025-08-14T16:27:07.02Z" }, + { url = "https://files.pythonhosted.org/packages/2d/e5/54a6dc811eba018d022ca2e8bd6f2969291f9586ccf9a22a05fc55f91250/fonttools-4.59.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb13823a74b3a9204a8ed76d3d6d5ec12e64cc5bc44914eb9ff1cdac04facd43", size = 4949109, upload-time = "2025-08-14T16:27:09.3Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/15/b05c72a248a95bea0fd05fbd95acdf0742945942143fcf961343b7a3663a/fonttools-4.59.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e1ca10da138c300f768bb68e40e5b20b6ecfbd95f91aac4cc15010b6b9d65455", size = 4888428, upload-time = "2025-08-14T16:27:11.514Z" }, + { url = "https://files.pythonhosted.org/packages/63/71/c7d6840f858d695adc0c4371ec45e3fb1c8e060b276ba944e2800495aca4/fonttools-4.59.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2beb5bfc4887a3130f8625349605a3a45fe345655ce6031d1bac11017454b943", size = 5032668, upload-time = "2025-08-14T16:27:13.872Z" }, + { url = "https://files.pythonhosted.org/packages/90/54/57be4aca6f1312e2bc4d811200dd822325794e05bdb26eeff0976edca651/fonttools-4.59.1-cp313-cp313-win32.whl", hash = "sha256:419f16d750d78e6d704bfe97b48bba2f73b15c9418f817d0cb8a9ca87a5b94bf", size = 2201832, upload-time = "2025-08-14T16:27:16.126Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1f/1899a6175a5f900ed8730a0d64f53ca1b596ed7609bfda033cf659114258/fonttools-4.59.1-cp313-cp313-win_amd64.whl", hash = "sha256:c536f8a852e8d3fa71dde1ec03892aee50be59f7154b533f0bf3c1174cfd5126", size = 2250673, upload-time = "2025-08-14T16:27:18.033Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/f6ba82c22f118d9985c37fea65d8d715ca71300d78b6c6e90874dc59f11d/fonttools-4.59.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d5c3bfdc9663f3d4b565f9cb3b8c1efb3e178186435b45105bde7328cfddd7fe", size = 2758606, upload-time = "2025-08-14T16:27:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/84aa3d0ce27b0112c28b67b637ff7a47cf401cf5fbfee6476e4bc9777580/fonttools-4.59.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ea03f1da0d722fe3c2278a05957e6550175571a4894fbf9d178ceef4a3783d2b", size = 2330187, upload-time = "2025-08-14T16:27:22.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/41/b3ba43f78afb321e2e50232c87304c8d0f5ab39b64389b8286cc39cdb824/fonttools-4.59.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:57a3708ca6bfccb790f585fa6d8f29432ec329618a09ff94c16bcb3c55994643", size = 4832020, upload-time = "2025-08-14T16:27:24.214Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/3af871c7fb325a68938e7ce544ca48bfd2c6bb7b357f3c8252933b29100a/fonttools-4.59.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:729367c91eb1ee84e61a733acc485065a00590618ca31c438e7dd4d600c01486", size = 4930687, upload-time = "2025-08-14T16:27:26.484Z" }, + { url = "https://files.pythonhosted.org/packages/c5/4f/299fc44646b30d9ef03ffaa78b109c7bd32121f0d8f10009ee73ac4514bc/fonttools-4.59.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f8ef66ac6db450193ed150e10b3b45dde7aded10c5d279968bc63368027f62b", size = 4875794, upload-time = "2025-08-14T16:27:28.887Z" }, + { url = "https://files.pythonhosted.org/packages/90/cf/a0a3d763ab58f5f81ceff104ddb662fd9da94248694862b9c6cbd509fdd5/fonttools-4.59.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:075f745d539a998cd92cb84c339a82e53e49114ec62aaea8307c80d3ad3aef3a", size = 4985780, upload-time = "2025-08-14T16:27:30.858Z" }, + { url = "https://files.pythonhosted.org/packages/72/c5/ba76511aaae143d89c29cd32ce30bafb61c477e8759a1590b8483f8065f8/fonttools-4.59.1-cp314-cp314-win32.whl", hash = "sha256:c2b0597522d4c5bb18aa5cf258746a2d4a90f25878cbe865e4d35526abd1b9fc", size = 2205610, upload-time = "2025-08-14T16:27:32.578Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/b250e69d6caf35bc65cddbf608be0662d741c248f2e7503ab01081fc267e/fonttools-4.59.1-cp314-cp314-win_amd64.whl", hash = "sha256:e9ad4ce044e3236f0814c906ccce8647046cc557539661e35211faadf76f283b", size = 2255376, upload-time = "2025-08-14T16:27:34.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/f3/0bc63a23ac0f8175e23d82f85d6ee693fbd849de7ad739f0a3622182ad29/fonttools-4.59.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:652159e8214eb4856e8387ebcd6b6bd336ee258cbeb639c8be52005b122b9609", size = 2826546, upload-time = "2025-08-14T16:27:36.783Z" }, + { url = "https://files.pythonhosted.org/packages/e9/46/a3968205590e068fdf60e926be329a207782576cb584d3b7dcd2d2844957/fonttools-4.59.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:43d177cd0e847ea026fedd9f099dc917da136ed8792d142298a252836390c478", size = 2359771, upload-time = "2025-08-14T16:27:39.678Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ff/d14b4c283879e8cb57862d9624a34fe6522b6fcdd46ccbfc58900958794a/fonttools-4.59.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e54437651e1440ee53a95e6ceb6ee440b67a3d348c76f45f4f48de1a5ecab019", size = 4831575, upload-time = "2025-08-14T16:27:41.885Z" }, + { url = "https://files.pythonhosted.org/packages/9c/04/a277d9a584a49d98ca12d3b2c6663bdf333ae97aaa83bd0cdabf7c5a6c84/fonttools-4.59.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6065fdec8ff44c32a483fd44abe5bcdb40dd5e2571a5034b555348f2b3a52cea", size = 5069962, upload-time = "2025-08-14T16:27:44.284Z" }, + { url = "https://files.pythonhosted.org/packages/16/6f/3d2ae69d96c4cdee6dfe7598ca5519a1514487700ca3d7c49c5a1ad65308/fonttools-4.59.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42052b56d176f8b315fbc09259439c013c0cb2109df72447148aeda677599612", size = 4942926, upload-time = "2025-08-14T16:27:46.523Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d3/c17379e0048d03ce26b38e4ab0e9a98280395b00529e093fe2d663ac0658/fonttools-4.59.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bcd52eaa5c4c593ae9f447c1d13e7e4a00ca21d755645efa660b6999425b3c88", size = 4958678, upload-time = "2025-08-14T16:27:48.555Z" 
}, + { url = "https://files.pythonhosted.org/packages/8c/3f/c5543a1540abdfb4d375e3ebeb84de365ab9b153ec14cb7db05f537dd1e7/fonttools-4.59.1-cp314-cp314t-win32.whl", hash = "sha256:02e4fdf27c550dded10fe038a5981c29f81cb9bc649ff2eaa48e80dab8998f97", size = 2266706, upload-time = "2025-08-14T16:27:50.556Z" }, + { url = "https://files.pythonhosted.org/packages/3e/99/85bff6e674226bc8402f983e365f07e76d990e7220ba72bcc738fef52391/fonttools-4.59.1-cp314-cp314t-win_amd64.whl", hash = "sha256:412a5fd6345872a7c249dac5bcce380393f40c1c316ac07f447bc17d51900922", size = 2329994, upload-time = "2025-08-14T16:27:52.36Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/99965234df564a14fd1527fe69252cfacce3dbcdcad50bada4b04aa6ebf6/fonttools-4.59.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ab4c1fb45f2984b8b4a3face7cff0f67f9766e9414cbb6fd061e9d77819de98", size = 2762863, upload-time = "2025-08-14T16:27:54.376Z" }, + { url = "https://files.pythonhosted.org/packages/e6/e6/0e2009312a95cf2926a61399074daec4abb54287d194e1988f3ae40d197f/fonttools-4.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ee39da0227950f88626c91e219659e6cd725ede826b1c13edd85fc4cec9bbe6", size = 2333837, upload-time = "2025-08-14T16:27:56.374Z" }, + { url = "https://files.pythonhosted.org/packages/72/df/63d586609002057d3e214f7d45c0c2bf0a5c5030f61d282dc39943546199/fonttools-4.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58a8844f96cff35860647a65345bfca87f47a2494bfb4bef754e58c082511443", size = 4822086, upload-time = "2025-08-14T16:27:58.659Z" }, + { url = "https://files.pythonhosted.org/packages/d2/82/d019c4499202c22fc6d8d122aad1175d2541289be76b8888546ac1f71d4a/fonttools-4.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f021cea6e36410874763f4a517a5e2d6ac36ca8f95521f3a9fdaad0fe73dc", size = 4755144, upload-time = "2025-08-14T16:28:00.71Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/32/130226b1da79fc67f623707bd20394ea87ca1ea9d858fa0e8e7d38847ce7/fonttools-4.59.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf5fb864f80061a40c1747e0dbc4f6e738de58dd6675b07eb80bd06a93b063c4", size = 4804680, upload-time = "2025-08-14T16:28:02.676Z" }, + { url = "https://files.pythonhosted.org/packages/fd/8c/73a4af65e2318d63719354730a700c21a9e271e74789c83d9d5d77621fcf/fonttools-4.59.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c29ea087843e27a7cffc78406d32a5abf166d92afde7890394e9e079c9b4dbe9", size = 4914623, upload-time = "2025-08-14T16:28:04.917Z" }, + { url = "https://files.pythonhosted.org/packages/a8/af/0763b655c9b713c70a9081a322e5411c4c85222471d0b7d6730cfcccbae9/fonttools-4.59.1-cp39-cp39-win32.whl", hash = "sha256:a960b09ff50c2e87864e83f352e5a90bcf1ad5233df579b1124660e1643de272", size = 1485466, upload-time = "2025-08-14T16:28:07.195Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b2/c2bb086f0525b6c458f2b72b994341f8ae1d5dac556e3073c70b2c973685/fonttools-4.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:e3680884189e2b7c3549f6d304376e64711fd15118e4b1ae81940cb6b1eaa267", size = 1529833, upload-time = "2025-08-14T16:28:09.88Z" }, + { url = "https://files.pythonhosted.org/packages/0f/64/9d606e66d498917cd7a2ff24f558010d42d6fd4576d9dd57f0bd98333f5a/fonttools-4.59.1-py3-none-any.whl", hash = "sha256:647db657073672a8330608970a984d51573557f328030566521bc03415535042", size = 1130094, upload-time = "2025-08-14T16:28:12.048Z" }, ] [[package]] @@ -859,23 +977,24 @@ wheels = [ [[package]] name = "gitpython" -version = "3.1.44" +version = "3.1.45" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gitdb" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = 
"sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196, upload-time = "2025-01-02T07:32:43.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599, upload-time = "2025-01-02T07:32:40.731Z" }, + { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, ] [[package]] name = "identify" -version = "2.6.12" +version = "2.6.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ca/ffbabe3635bb839aa36b3a893c91a9b0d368cb4d8073e03a12896970af82/identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32", size = 99243, upload-time = "2025-08-09T19:35:00.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = 
"2025-05-23T20:37:51.495Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ce/461b60a3ee109518c055953729bf9ed089a04db895d47e95444071dcdef2/identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b", size = 99153, upload-time = "2025-08-09T19:34:59.1Z" }, ] [[package]] @@ -922,7 +1041,7 @@ wheels = [ [[package]] name = "ipykernel" -version = "6.29.5" +version = "6.30.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "appnope", marker = "sys_platform == 'darwin'" }, @@ -930,7 +1049,7 @@ dependencies = [ { name = "debugpy" }, { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython", version = "9.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -941,9 +1060,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367, upload-time = "2024-07-01T14:07:22.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/76/11082e338e0daadc89c8ff866185de11daf67d181901038f9e139d109761/ipykernel-6.30.1.tar.gz", hash = "sha256:6abb270161896402e76b91394fcdce5d1be5d45f456671e5080572f8505be39b", size = 166260, upload-time = "2025-08-04T15:47:35.018Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173, upload-time = "2024-07-01T14:07:19.603Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c7/b445faca8deb954fe536abebff4ece5b097b923de482b26e78448c89d1dd/ipykernel-6.30.1-py3-none-any.whl", hash = "sha256:aa6b9fb93dca949069d8b85b6c79b2518e32ac583ae9c7d37c51d119e18b3fb4", size = 117484, upload-time = "2025-08-04T15:47:32.622Z" }, ] [[package]] @@ -998,7 +1117,7 @@ wheels = [ [[package]] name = "ipython" -version = "9.3.0" +version = "9.4.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", @@ -1017,9 +1136,9 @@ dependencies = [ { name = "traitlets", marker = "python_full_version >= '3.11'" }, { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/09/4c7e06b96fbd203e06567b60fb41b06db606b6a82db6db7b2c85bb72a15c/ipython-9.3.0.tar.gz", hash = "sha256:79eb896f9f23f50ad16c3bc205f686f6e030ad246cc309c6279a242b14afe9d8", size = 4426460, upload-time = "2025-05-31T16:34:55.678Z" } +sdist = { url = "https://files.pythonhosted.org/packages/54/80/406f9e3bde1c1fd9bf5a0be9d090f8ae623e401b7670d8f6fdf2ab679891/ipython-9.4.0.tar.gz", hash = "sha256:c033c6d4e7914c3d9768aabe76bbe87ba1dc66a92a05db6bfa1125d81f2ee270", size = 4385338, upload-time = "2025-07-01T11:11:30.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/99/9ed3d52d00f1846679e3aa12e2326ac7044b5e7f90dc822b60115fa533ca/ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04", size = 605320, upload-time = "2025-05-31T16:34:52.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/f8/0031ee2b906a15a33d6bfc12dd09c3dfa966b3cb5b284ecfb7549e6ac3c4/ipython-9.4.0-py3-none-any.whl", hash = "sha256:25850f025a446d9b359e8d296ba175a36aedd32e83ca9b5060430fe16801f066", size = 611021, upload-time = "2025-07-01T11:11:27.85Z" }, ] [[package]] @@ -1048,11 +1167,11 @@ wheels = [ [[package]] name = "json5" -version = "0.12.0" +version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/be/c6c745ec4c4539b25a278b70e29793f10382947df0d9efba2fa09120895d/json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a", size = 51907, upload-time = "2025-04-03T16:33:13.201Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/ae/929aee9619e9eba9015207a9d2c1c54db18311da7eb4dcf6d41ad6f0eb67/json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990", size = 52191, upload-time = "2025-08-12T19:47:42.583Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/9f/3500910d5a98549e3098807493851eeef2b89cdd3032227558a104dfe926/json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db", size = 36079, upload-time = "2025-04-03T16:33:11.927Z" }, + { url = "https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size = 36119, upload-time = "2025-08-12T19:47:41.131Z" }, ] [[package]] @@ -1191,94 +1310,115 @@ wheels = [ [[package]] name = "kiwisolver" -version = "1.4.8" +version = "1.4.9" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] -sdist = { url = 
"https://files.pythonhosted.org/packages/82/59/7c91426a8ac292e1cdd53a63b6d9439abd573c875c3f92c146767dd33faf/kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e", size = 97538, upload-time = "2024-12-24T18:30:51.519Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/5f/4d8e9e852d98ecd26cdf8eaf7ed8bc33174033bba5e07001b289f07308fd/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db", size = 124623, upload-time = "2024-12-24T18:28:17.687Z" }, - { url = "https://files.pythonhosted.org/packages/1d/70/7f5af2a18a76fe92ea14675f8bd88ce53ee79e37900fa5f1a1d8e0b42998/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b", size = 66720, upload-time = "2024-12-24T18:28:19.158Z" }, - { url = "https://files.pythonhosted.org/packages/c6/13/e15f804a142353aefd089fadc8f1d985561a15358c97aca27b0979cb0785/kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d", size = 65413, upload-time = "2024-12-24T18:28:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/ce/6d/67d36c4d2054e83fb875c6b59d0809d5c530de8148846b1370475eeeece9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d", size = 1650826, upload-time = "2024-12-24T18:28:21.203Z" }, - { url = "https://files.pythonhosted.org/packages/de/c6/7b9bb8044e150d4d1558423a1568e4f227193662a02231064e3824f37e0a/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c", size = 1628231, upload-time = "2024-12-24T18:28:23.851Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/38/ad10d437563063eaaedbe2c3540a71101fc7fb07a7e71f855e93ea4de605/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3", size = 1408938, upload-time = "2024-12-24T18:28:26.687Z" }, - { url = "https://files.pythonhosted.org/packages/52/ce/c0106b3bd7f9e665c5f5bc1e07cc95b5dabd4e08e3dad42dbe2faad467e7/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed", size = 1422799, upload-time = "2024-12-24T18:28:30.538Z" }, - { url = "https://files.pythonhosted.org/packages/d0/87/efb704b1d75dc9758087ba374c0f23d3254505edaedd09cf9d247f7878b9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f", size = 1354362, upload-time = "2024-12-24T18:28:32.943Z" }, - { url = "https://files.pythonhosted.org/packages/eb/b3/fd760dc214ec9a8f208b99e42e8f0130ff4b384eca8b29dd0efc62052176/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff", size = 2222695, upload-time = "2024-12-24T18:28:35.641Z" }, - { url = "https://files.pythonhosted.org/packages/a2/09/a27fb36cca3fc01700687cc45dae7a6a5f8eeb5f657b9f710f788748e10d/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d", size = 2370802, upload-time = "2024-12-24T18:28:38.357Z" }, - { url = "https://files.pythonhosted.org/packages/3d/c3/ba0a0346db35fe4dc1f2f2cf8b99362fbb922d7562e5f911f7ce7a7b60fa/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c", size = 2334646, upload-time = "2024-12-24T18:28:40.941Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/52/942cf69e562f5ed253ac67d5c92a693745f0bed3c81f49fc0cbebe4d6b00/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605", size = 2467260, upload-time = "2024-12-24T18:28:42.273Z" }, - { url = "https://files.pythonhosted.org/packages/32/26/2d9668f30d8a494b0411d4d7d4ea1345ba12deb6a75274d58dd6ea01e951/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e", size = 2288633, upload-time = "2024-12-24T18:28:44.87Z" }, - { url = "https://files.pythonhosted.org/packages/98/99/0dd05071654aa44fe5d5e350729961e7bb535372935a45ac89a8924316e6/kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751", size = 71885, upload-time = "2024-12-24T18:28:47.346Z" }, - { url = "https://files.pythonhosted.org/packages/6c/fc/822e532262a97442989335394d441cd1d0448c2e46d26d3e04efca84df22/kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271", size = 65175, upload-time = "2024-12-24T18:28:49.651Z" }, - { url = "https://files.pythonhosted.org/packages/da/ed/c913ee28936c371418cb167b128066ffb20bbf37771eecc2c97edf8a6e4c/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84", size = 124635, upload-time = "2024-12-24T18:28:51.826Z" }, - { url = "https://files.pythonhosted.org/packages/4c/45/4a7f896f7467aaf5f56ef093d1f329346f3b594e77c6a3c327b2d415f521/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561", size = 66717, upload-time = "2024-12-24T18:28:54.256Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/b4/c12b3ac0852a3a68f94598d4c8d569f55361beef6159dce4e7b624160da2/kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7", size = 65413, upload-time = "2024-12-24T18:28:55.184Z" }, - { url = "https://files.pythonhosted.org/packages/a9/98/1df4089b1ed23d83d410adfdc5947245c753bddfbe06541c4aae330e9e70/kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03", size = 1343994, upload-time = "2024-12-24T18:28:57.493Z" }, - { url = "https://files.pythonhosted.org/packages/8d/bf/b4b169b050c8421a7c53ea1ea74e4ef9c335ee9013216c558a047f162d20/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954", size = 1434804, upload-time = "2024-12-24T18:29:00.077Z" }, - { url = "https://files.pythonhosted.org/packages/66/5a/e13bd341fbcf73325ea60fdc8af752addf75c5079867af2e04cc41f34434/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79", size = 1450690, upload-time = "2024-12-24T18:29:01.401Z" }, - { url = "https://files.pythonhosted.org/packages/9b/4f/5955dcb376ba4a830384cc6fab7d7547bd6759fe75a09564910e9e3bb8ea/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6", size = 1376839, upload-time = "2024-12-24T18:29:02.685Z" }, - { url = "https://files.pythonhosted.org/packages/3a/97/5edbed69a9d0caa2e4aa616ae7df8127e10f6586940aa683a496c2c280b9/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0", size = 1435109, upload-time = 
"2024-12-24T18:29:04.113Z" }, - { url = "https://files.pythonhosted.org/packages/13/fc/e756382cb64e556af6c1809a1bbb22c141bbc2445049f2da06b420fe52bf/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab", size = 2245269, upload-time = "2024-12-24T18:29:05.488Z" }, - { url = "https://files.pythonhosted.org/packages/76/15/e59e45829d7f41c776d138245cabae6515cb4eb44b418f6d4109c478b481/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc", size = 2393468, upload-time = "2024-12-24T18:29:06.79Z" }, - { url = "https://files.pythonhosted.org/packages/e9/39/483558c2a913ab8384d6e4b66a932406f87c95a6080112433da5ed668559/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25", size = 2355394, upload-time = "2024-12-24T18:29:08.24Z" }, - { url = "https://files.pythonhosted.org/packages/01/aa/efad1fbca6570a161d29224f14b082960c7e08268a133fe5dc0f6906820e/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc", size = 2490901, upload-time = "2024-12-24T18:29:09.653Z" }, - { url = "https://files.pythonhosted.org/packages/c9/4f/15988966ba46bcd5ab9d0c8296914436720dd67fca689ae1a75b4ec1c72f/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67", size = 2312306, upload-time = "2024-12-24T18:29:12.644Z" }, - { url = "https://files.pythonhosted.org/packages/2d/27/bdf1c769c83f74d98cbc34483a972f221440703054894a37d174fba8aa68/kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34", size = 71966, upload-time = "2024-12-24T18:29:14.089Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/c9/9642ea855604aeb2968a8e145fc662edf61db7632ad2e4fb92424be6b6c0/kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2", size = 65311, upload-time = "2024-12-24T18:29:15.892Z" }, - { url = "https://files.pythonhosted.org/packages/fc/aa/cea685c4ab647f349c3bc92d2daf7ae34c8e8cf405a6dcd3a497f58a2ac3/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502", size = 124152, upload-time = "2024-12-24T18:29:16.85Z" }, - { url = "https://files.pythonhosted.org/packages/c5/0b/8db6d2e2452d60d5ebc4ce4b204feeb16176a851fd42462f66ade6808084/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31", size = 66555, upload-time = "2024-12-24T18:29:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/60/26/d6a0db6785dd35d3ba5bf2b2df0aedc5af089962c6eb2cbf67a15b81369e/kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb", size = 65067, upload-time = "2024-12-24T18:29:20.096Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ed/1d97f7e3561e09757a196231edccc1bcf59d55ddccefa2afc9c615abd8e0/kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f", size = 1378443, upload-time = "2024-12-24T18:29:22.843Z" }, - { url = "https://files.pythonhosted.org/packages/29/61/39d30b99954e6b46f760e6289c12fede2ab96a254c443639052d1b573fbc/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc", size = 1472728, upload-time = "2024-12-24T18:29:24.463Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/3e/804163b932f7603ef256e4a715e5843a9600802bb23a68b4e08c8c0ff61d/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a", size = 1478388, upload-time = "2024-12-24T18:29:25.776Z" }, - { url = "https://files.pythonhosted.org/packages/8a/9e/60eaa75169a154700be74f875a4d9961b11ba048bef315fbe89cb6999056/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a", size = 1413849, upload-time = "2024-12-24T18:29:27.202Z" }, - { url = "https://files.pythonhosted.org/packages/bc/b3/9458adb9472e61a998c8c4d95cfdfec91c73c53a375b30b1428310f923e4/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a", size = 1475533, upload-time = "2024-12-24T18:29:28.638Z" }, - { url = "https://files.pythonhosted.org/packages/e4/7a/0a42d9571e35798de80aef4bb43a9b672aa7f8e58643d7bd1950398ffb0a/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3", size = 2268898, upload-time = "2024-12-24T18:29:30.368Z" }, - { url = "https://files.pythonhosted.org/packages/d9/07/1255dc8d80271400126ed8db35a1795b1a2c098ac3a72645075d06fe5c5d/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b", size = 2425605, upload-time = "2024-12-24T18:29:33.151Z" }, - { url = "https://files.pythonhosted.org/packages/84/df/5a3b4cf13780ef6f6942df67b138b03b7e79e9f1f08f57c49957d5867f6e/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4", size = 2375801, upload-time = "2024-12-24T18:29:34.584Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/10/2348d068e8b0f635c8c86892788dac7a6b5c0cb12356620ab575775aad89/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d", size = 2520077, upload-time = "2024-12-24T18:29:36.138Z" }, - { url = "https://files.pythonhosted.org/packages/32/d8/014b89fee5d4dce157d814303b0fce4d31385a2af4c41fed194b173b81ac/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8", size = 2338410, upload-time = "2024-12-24T18:29:39.991Z" }, - { url = "https://files.pythonhosted.org/packages/bd/72/dfff0cc97f2a0776e1c9eb5bef1ddfd45f46246c6533b0191887a427bca5/kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50", size = 71853, upload-time = "2024-12-24T18:29:42.006Z" }, - { url = "https://files.pythonhosted.org/packages/dc/85/220d13d914485c0948a00f0b9eb419efaf6da81b7d72e88ce2391f7aed8d/kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476", size = 65424, upload-time = "2024-12-24T18:29:44.38Z" }, - { url = "https://files.pythonhosted.org/packages/79/b3/e62464a652f4f8cd9006e13d07abad844a47df1e6537f73ddfbf1bc997ec/kiwisolver-1.4.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1c8ceb754339793c24aee1c9fb2485b5b1f5bb1c2c214ff13368431e51fc9a09", size = 124156, upload-time = "2024-12-24T18:29:45.368Z" }, - { url = "https://files.pythonhosted.org/packages/8d/2d/f13d06998b546a2ad4f48607a146e045bbe48030774de29f90bdc573df15/kiwisolver-1.4.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a62808ac74b5e55a04a408cda6156f986cefbcf0ada13572696b507cc92fa1", size = 66555, upload-time = "2024-12-24T18:29:46.37Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/e3/b8bd14b0a54998a9fd1e8da591c60998dc003618cb19a3f94cb233ec1511/kiwisolver-1.4.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68269e60ee4929893aad82666821aaacbd455284124817af45c11e50a4b42e3c", size = 65071, upload-time = "2024-12-24T18:29:47.333Z" }, - { url = "https://files.pythonhosted.org/packages/f0/1c/6c86f6d85ffe4d0ce04228d976f00674f1df5dc893bf2dd4f1928748f187/kiwisolver-1.4.8-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d142fba9c464bc3bbfeff15c96eab0e7310343d6aefb62a79d51421fcc5f1b", size = 1378053, upload-time = "2024-12-24T18:29:49.636Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b9/1c6e9f6dcb103ac5cf87cb695845f5fa71379021500153566d8a8a9fc291/kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc373e0eef45b59197de815b1b28ef89ae3955e7722cc9710fb91cd77b7f47", size = 1472278, upload-time = "2024-12-24T18:29:51.164Z" }, - { url = "https://files.pythonhosted.org/packages/ee/81/aca1eb176de671f8bda479b11acdc42c132b61a2ac861c883907dde6debb/kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77e6f57a20b9bd4e1e2cedda4d0b986ebd0216236f0106e55c28aea3d3d69b16", size = 1478139, upload-time = "2024-12-24T18:29:52.594Z" }, - { url = "https://files.pythonhosted.org/packages/49/f4/e081522473671c97b2687d380e9e4c26f748a86363ce5af48b4a28e48d06/kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08e77738ed7538f036cd1170cbed942ef749137b1311fa2bbe2a7fda2f6bf3cc", size = 1413517, upload-time = "2024-12-24T18:29:53.941Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e9/6a7d025d8da8c4931522922cd706105aa32b3291d1add8c5427cdcd66e63/kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5ce1e481a74b44dd5e92ff03ea0cb371ae7a0268318e202be06c8f04f4f1246", size = 1474952, upload-time = 
"2024-12-24T18:29:56.523Z" }, - { url = "https://files.pythonhosted.org/packages/82/13/13fa685ae167bee5d94b415991c4fc7bb0a1b6ebea6e753a87044b209678/kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc2ace710ba7c1dfd1a3b42530b62b9ceed115f19a1656adefce7b1782a37794", size = 2269132, upload-time = "2024-12-24T18:29:57.989Z" }, - { url = "https://files.pythonhosted.org/packages/ef/92/bb7c9395489b99a6cb41d502d3686bac692586db2045adc19e45ee64ed23/kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3452046c37c7692bd52b0e752b87954ef86ee2224e624ef7ce6cb21e8c41cc1b", size = 2425997, upload-time = "2024-12-24T18:29:59.393Z" }, - { url = "https://files.pythonhosted.org/packages/ed/12/87f0e9271e2b63d35d0d8524954145837dd1a6c15b62a2d8c1ebe0f182b4/kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e9a60b50fe8b2ec6f448fe8d81b07e40141bfced7f896309df271a0b92f80f3", size = 2376060, upload-time = "2024-12-24T18:30:01.338Z" }, - { url = "https://files.pythonhosted.org/packages/02/6e/c8af39288edbce8bf0fa35dee427b082758a4b71e9c91ef18fa667782138/kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:918139571133f366e8362fa4a297aeba86c7816b7ecf0bc79168080e2bd79957", size = 2520471, upload-time = "2024-12-24T18:30:04.574Z" }, - { url = "https://files.pythonhosted.org/packages/13/78/df381bc7b26e535c91469f77f16adcd073beb3e2dd25042efd064af82323/kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e063ef9f89885a1d68dd8b2e18f5ead48653176d10a0e324e3b0030e3a69adeb", size = 2338793, upload-time = "2024-12-24T18:30:06.25Z" }, - { url = "https://files.pythonhosted.org/packages/d0/dc/c1abe38c37c071d0fc71c9a474fd0b9ede05d42f5a458d584619cfd2371a/kiwisolver-1.4.8-cp313-cp313-win_amd64.whl", hash = "sha256:a17b7c4f5b2c51bb68ed379defd608a03954a1845dfed7cc0117f1cc8a9b7fd2", size = 71855, upload-time = "2024-12-24T18:30:07.535Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/b6/21529d595b126ac298fdd90b705d87d4c5693de60023e0efcb4f387ed99e/kiwisolver-1.4.8-cp313-cp313-win_arm64.whl", hash = "sha256:3cd3bc628b25f74aedc6d374d5babf0166a92ff1317f46267f12d2ed54bc1d30", size = 65430, upload-time = "2024-12-24T18:30:08.504Z" }, - { url = "https://files.pythonhosted.org/packages/34/bd/b89380b7298e3af9b39f49334e3e2a4af0e04819789f04b43d560516c0c8/kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:370fd2df41660ed4e26b8c9d6bbcad668fbe2560462cba151a721d49e5b6628c", size = 126294, upload-time = "2024-12-24T18:30:09.508Z" }, - { url = "https://files.pythonhosted.org/packages/83/41/5857dc72e5e4148eaac5aa76e0703e594e4465f8ab7ec0fc60e3a9bb8fea/kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:84a2f830d42707de1d191b9490ac186bf7997a9495d4e9072210a1296345f7dc", size = 67736, upload-time = "2024-12-24T18:30:11.039Z" }, - { url = "https://files.pythonhosted.org/packages/e1/d1/be059b8db56ac270489fb0b3297fd1e53d195ba76e9bbb30e5401fa6b759/kiwisolver-1.4.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a3ad337add5148cf51ce0b55642dc551c0b9d6248458a757f98796ca7348712", size = 66194, upload-time = "2024-12-24T18:30:14.886Z" }, - { url = "https://files.pythonhosted.org/packages/e1/83/4b73975f149819eb7dcf9299ed467eba068ecb16439a98990dcb12e63fdd/kiwisolver-1.4.8-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7506488470f41169b86d8c9aeff587293f530a23a23a49d6bc64dab66bedc71e", size = 1465942, upload-time = "2024-12-24T18:30:18.927Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2c/30a5cdde5102958e602c07466bce058b9d7cb48734aa7a4327261ac8e002/kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f0121b07b356a22fb0414cec4666bbe36fd6d0d759db3d37228f496ed67c880", size = 1595341, upload-time = "2024-12-24T18:30:22.102Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/9b/1e71db1c000385aa069704f5990574b8244cce854ecd83119c19e83c9586/kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6d6bd87df62c27d4185de7c511c6248040afae67028a8a22012b010bc7ad062", size = 1598455, upload-time = "2024-12-24T18:30:24.947Z" }, - { url = "https://files.pythonhosted.org/packages/85/92/c8fec52ddf06231b31cbb779af77e99b8253cd96bd135250b9498144c78b/kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:291331973c64bb9cce50bbe871fb2e675c4331dab4f31abe89f175ad7679a4d7", size = 1522138, upload-time = "2024-12-24T18:30:26.286Z" }, - { url = "https://files.pythonhosted.org/packages/0b/51/9eb7e2cd07a15d8bdd976f6190c0164f92ce1904e5c0c79198c4972926b7/kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:893f5525bb92d3d735878ec00f781b2de998333659507d29ea4466208df37bed", size = 1582857, upload-time = "2024-12-24T18:30:28.86Z" }, - { url = "https://files.pythonhosted.org/packages/0f/95/c5a00387a5405e68ba32cc64af65ce881a39b98d73cc394b24143bebc5b8/kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b47a465040146981dc9db8647981b8cb96366fbc8d452b031e4f8fdffec3f26d", size = 2293129, upload-time = "2024-12-24T18:30:30.34Z" }, - { url = "https://files.pythonhosted.org/packages/44/83/eeb7af7d706b8347548313fa3a3a15931f404533cc54fe01f39e830dd231/kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:99cea8b9dd34ff80c521aef46a1dddb0dcc0283cf18bde6d756f1e6f31772165", size = 2421538, upload-time = "2024-12-24T18:30:33.334Z" }, - { url = "https://files.pythonhosted.org/packages/05/f9/27e94c1b3eb29e6933b6986ffc5fa1177d2cd1f0c8efc5f02c91c9ac61de/kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:151dffc4865e5fe6dafce5480fab84f950d14566c480c08a53c663a0020504b6", size = 2390661, upload-time = "2024-12-24T18:30:34.939Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/d4/3c9735faa36ac591a4afcc2980d2691000506050b7a7e80bcfe44048daa7/kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:577facaa411c10421314598b50413aa1ebcf5126f704f1e5d72d7e4e9f020d90", size = 2546710, upload-time = "2024-12-24T18:30:37.281Z" }, - { url = "https://files.pythonhosted.org/packages/4c/fa/be89a49c640930180657482a74970cdcf6f7072c8d2471e1babe17a222dc/kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:be4816dc51c8a471749d664161b434912eee82f2ea66bd7628bd14583a833e85", size = 2349213, upload-time = "2024-12-24T18:30:40.019Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f9/ae81c47a43e33b93b0a9819cac6723257f5da2a5a60daf46aa5c7226ea85/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a", size = 60403, upload-time = "2024-12-24T18:30:41.372Z" }, - { url = "https://files.pythonhosted.org/packages/58/ca/f92b5cb6f4ce0c1ebfcfe3e2e42b96917e16f7090e45b21102941924f18f/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8", size = 58657, upload-time = "2024-12-24T18:30:42.392Z" }, - { url = "https://files.pythonhosted.org/packages/80/28/ae0240f732f0484d3a4dc885d055653c47144bdf59b670aae0ec3c65a7c8/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0", size = 84948, upload-time = "2024-12-24T18:30:44.703Z" }, - { url = "https://files.pythonhosted.org/packages/5d/eb/78d50346c51db22c7203c1611f9b513075f35c4e0e4877c5dde378d66043/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c", size = 81186, upload-time = "2024-12-24T18:30:45.654Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/f8/7259f18c77adca88d5f64f9a522792e178b2691f3748817a8750c2d216ef/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b", size = 80279, upload-time = "2024-12-24T18:30:47.951Z" }, - { url = "https://files.pythonhosted.org/packages/3a/1d/50ad811d1c5dae091e4cf046beba925bcae0a610e79ae4c538f996f63ed5/kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b", size = 71762, upload-time = "2024-12-24T18:30:48.903Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/5d/8ce64e36d4e3aac5ca96996457dcf33e34e6051492399a3f1fec5657f30b/kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b", size = 124159, upload-time = "2025-08-10T21:25:35.472Z" }, + { url = "https://files.pythonhosted.org/packages/96/1e/22f63ec454874378175a5f435d6ea1363dd33fb2af832c6643e4ccea0dc8/kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f", size = 66578, upload-time = "2025-08-10T21:25:36.73Z" }, + { url = "https://files.pythonhosted.org/packages/41/4c/1925dcfff47a02d465121967b95151c82d11027d5ec5242771e580e731bd/kiwisolver-1.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84fd60810829c27ae375114cd379da1fa65e6918e1da405f356a775d49a62bcf", size = 65312, upload-time = "2025-08-10T21:25:37.658Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/42/0f333164e6307a0687d1eb9ad256215aae2f4bd5d28f4653d6cd319a3ba3/kiwisolver-1.4.9-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78efa4c6e804ecdf727e580dbb9cba85624d2e1c6b5cb059c66290063bd99a9", size = 1628458, upload-time = "2025-08-10T21:25:39.067Z" }, + { url = "https://files.pythonhosted.org/packages/86/b6/2dccb977d651943995a90bfe3495c2ab2ba5cd77093d9f2318a20c9a6f59/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4efec7bcf21671db6a3294ff301d2fc861c31faa3c8740d1a94689234d1b415", size = 1225640, upload-time = "2025-08-10T21:25:40.489Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/362ebd3eec46c850ccf2bfe3e30f2fc4c008750011f38a850f088c56a1c6/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90f47e70293fc3688b71271100a1a5453aa9944a81d27ff779c108372cf5567b", size = 1244074, upload-time = "2025-08-10T21:25:42.221Z" }, + { url = "https://files.pythonhosted.org/packages/6f/bb/f09a1e66dab8984773d13184a10a29fe67125337649d26bdef547024ed6b/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fdca1def57a2e88ef339de1737a1449d6dbf5fab184c54a1fca01d541317154", size = 1293036, upload-time = "2025-08-10T21:25:43.801Z" }, + { url = "https://files.pythonhosted.org/packages/ea/01/11ecf892f201cafda0f68fa59212edaea93e96c37884b747c181303fccd1/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cf554f21be770f5111a1690d42313e140355e687e05cf82cb23d0a721a64a48", size = 2175310, upload-time = "2025-08-10T21:25:45.045Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5f/bfe11d5b934f500cc004314819ea92427e6e5462706a498c1d4fc052e08f/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1795ac5cd0510207482c3d1d3ed781143383b8cfd36f5c645f3897ce066220", size = 2270943, upload-time = "2025-08-10T21:25:46.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/de/259f786bf71f1e03e73d87e2db1a9a3bcab64d7b4fd780167123161630ad/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ccd09f20ccdbbd341b21a67ab50a119b64a403b09288c27481575105283c1586", size = 2440488, upload-time = "2025-08-10T21:25:48.074Z" }, + { url = "https://files.pythonhosted.org/packages/1b/76/c989c278faf037c4d3421ec07a5c452cd3e09545d6dae7f87c15f54e4edf/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:540c7c72324d864406a009d72f5d6856f49693db95d1fbb46cf86febef873634", size = 2246787, upload-time = "2025-08-10T21:25:49.442Z" }, + { url = "https://files.pythonhosted.org/packages/a2/55/c2898d84ca440852e560ca9f2a0d28e6e931ac0849b896d77231929900e7/kiwisolver-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:ede8c6d533bc6601a47ad4046080d36b8fc99f81e6f1c17b0ac3c2dc91ac7611", size = 73730, upload-time = "2025-08-10T21:25:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/e8/09/486d6ac523dd33b80b368247f238125d027964cfacb45c654841e88fb2ae/kiwisolver-1.4.9-cp310-cp310-win_arm64.whl", hash = "sha256:7b4da0d01ac866a57dd61ac258c5607b4cd677f63abaec7b148354d2b2cdd536", size = 65036, upload-time = "2025-08-10T21:25:52.063Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, + { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" }, + { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" }, + { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" }, + { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" }, + { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, + { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, + { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, + { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, + { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, + { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" }, + { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" }, + { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" }, + { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" }, + { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" }, + { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" }, + { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" }, + { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" }, + { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" }, + { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" }, + { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" }, + { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" }, + { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" }, + { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" }, + { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" }, + { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" }, + { url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" }, + { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" }, + { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" }, + { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" }, + { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" }, + { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" }, + { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, + { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, + { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, + { url = "https://files.pythonhosted.org/packages/a2/63/fde392691690f55b38d5dd7b3710f5353bf7a8e52de93a22968801ab8978/kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d1d9e582ad4d63062d34077a9a1e9f3c34088a2ec5135b1f7190c07cf366527", size = 60183, upload-time = "2025-08-10T21:27:37.669Z" }, + { url = "https://files.pythonhosted.org/packages/27/b1/6aad34edfdb7cced27f371866f211332bba215bfd918ad3322a58f480d8b/kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:deed0c7258ceb4c44ad5ec7d9918f9f14fd05b2be86378d86cf50e63d1e7b771", size = 58675, upload-time = "2025-08-10T21:27:39.031Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1a/23d855a702bb35a76faed5ae2ba3de57d323f48b1f6b17ee2176c4849463/kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a590506f303f512dff6b7f75fd2fd18e16943efee932008fe7140e5fa91d80e", size = 80277, upload-time = "2025-08-10T21:27:40.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/5b/5239e3c2b8fb5afa1e8508f721bb77325f740ab6994d963e61b2b7abcc1e/kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e09c2279a4d01f099f52d5c4b3d9e208e91edcbd1a175c9662a8b16e000fece9", size = 77994, upload-time = "2025-08-10T21:27:41.181Z" }, + { url = "https://files.pythonhosted.org/packages/f9/1c/5d4d468fb16f8410e596ed0eac02d2c68752aa7dc92997fe9d60a7147665/kiwisolver-1.4.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c9e7cdf45d594ee04d5be1b24dd9d49f3d1590959b2271fb30b5ca2b262c00fb", size = 73744, upload-time = "2025-08-10T21:27:42.254Z" }, + { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, + { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, + { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, ] [[package]] @@ -1346,7 +1486,7 @@ wheels = [ [[package]] name = "matplotlib" -version = "3.10.3" +version = "3.10.5" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", @@ -1354,52 +1494,74 @@ resolution-markers = [ "python_full_version == '3.10.*'", ] dependencies = [ - { name = "contourpy", version = "1.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "contourpy", version = "1.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "contourpy", version = "1.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "cycler", marker = "python_full_version >= '3.10'" }, { name = "fonttools", marker = "python_full_version >= '3.10'" }, - { name = "kiwisolver", version = "1.4.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "kiwisolver", version = "1.4.9", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "packaging", marker = "python_full_version >= '3.10'" }, { name = "pillow", marker = "python_full_version >= '3.10'" }, { name = 
"pyparsing", marker = "python_full_version >= '3.10'" }, { name = "python-dateutil", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/91/d49359a21893183ed2a5b6c76bec40e0b1dcbf8ca148f864d134897cfc75/matplotlib-3.10.3.tar.gz", hash = "sha256:2f82d2c5bb7ae93aaaa4cd42aca65d76ce6376f83304fa3a630b569aca274df0", size = 34799811, upload-time = "2025-05-08T19:10:54.39Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/ea/2bba25d289d389c7451f331ecd593944b3705f06ddf593fa7be75037d308/matplotlib-3.10.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:213fadd6348d106ca7db99e113f1bea1e65e383c3ba76e8556ba4a3054b65ae7", size = 8167862, upload-time = "2025-05-08T19:09:39.563Z" }, - { url = "https://files.pythonhosted.org/packages/41/81/cc70b5138c926604e8c9ed810ed4c79e8116ba72e02230852f5c12c87ba2/matplotlib-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3bec61cb8221f0ca6313889308326e7bb303d0d302c5cc9e523b2f2e6c73deb", size = 8042149, upload-time = "2025-05-08T19:09:42.413Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9a/0ff45b6bfa42bb16de597e6058edf2361c298ad5ef93b327728145161bbf/matplotlib-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c21ae75651c0231b3ba014b6d5e08fb969c40cdb5a011e33e99ed0c9ea86ecb", size = 8453719, upload-time = "2025-05-08T19:09:44.901Z" }, - { url = "https://files.pythonhosted.org/packages/85/c7/1866e972fed6d71ef136efbc980d4d1854ab7ef1ea8152bbd995ca231c81/matplotlib-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e39755580b08e30e3620efc659330eac5d6534ab7eae50fa5e31f53ee4e30", size = 8590801, upload-time = "2025-05-08T19:09:47.404Z" }, - { url = "https://files.pythonhosted.org/packages/5d/b9/748f6626d534ab7e255bdc39dc22634d337cf3ce200f261b5d65742044a1/matplotlib-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:cf4636203e1190871d3a73664dea03d26fb019b66692cbfd642faafdad6208e8", size = 9402111, upload-time = "2025-05-08T19:09:49.474Z" }, - { url = "https://files.pythonhosted.org/packages/1f/78/8bf07bd8fb67ea5665a6af188e70b57fcb2ab67057daa06b85a08e59160a/matplotlib-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:fd5641a9bb9d55f4dd2afe897a53b537c834b9012684c8444cc105895c8c16fd", size = 8057213, upload-time = "2025-05-08T19:09:51.489Z" }, - { url = "https://files.pythonhosted.org/packages/f5/bd/af9f655456f60fe1d575f54fb14704ee299b16e999704817a7645dfce6b0/matplotlib-3.10.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0ef061f74cd488586f552d0c336b2f078d43bc00dc473d2c3e7bfee2272f3fa8", size = 8178873, upload-time = "2025-05-08T19:09:53.857Z" }, - { url = "https://files.pythonhosted.org/packages/c2/86/e1c86690610661cd716eda5f9d0b35eaf606ae6c9b6736687cfc8f2d0cd8/matplotlib-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d96985d14dc5f4a736bbea4b9de9afaa735f8a0fc2ca75be2fa9e96b2097369d", size = 8052205, upload-time = "2025-05-08T19:09:55.684Z" }, - { url = "https://files.pythonhosted.org/packages/54/51/a9f8e49af3883dacddb2da1af5fca1f7468677f1188936452dd9aaaeb9ed/matplotlib-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5f0283da91e9522bdba4d6583ed9d5521566f63729ffb68334f86d0bb98049", size = 8465823, upload-time = "2025-05-08T19:09:57.442Z" }, - { url = "https://files.pythonhosted.org/packages/e7/e3/c82963a3b86d6e6d5874cbeaa390166458a7f1961bab9feb14d3d1a10f02/matplotlib-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdfa07c0ec58035242bc8b2c8aae37037c9a886370eef6850703d7583e19964b", size = 8606464, upload-time = "2025-05-08T19:09:59.471Z" }, - { url = "https://files.pythonhosted.org/packages/0e/34/24da1027e7fcdd9e82da3194c470143c551852757a4b473a09a012f5b945/matplotlib-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:c0b9849a17bce080a16ebcb80a7b714b5677d0ec32161a2cc0a8e5a6030ae220", size = 9413103, upload-time = "2025-05-08T19:10:03.208Z" }, - { url = "https://files.pythonhosted.org/packages/a6/da/948a017c3ea13fd4a97afad5fdebe2f5bbc4d28c0654510ce6fd6b06b7bd/matplotlib-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:eef6ed6c03717083bc6d69c2d7ee8624205c29a8e6ea5a31cd3492ecdbaee1e1", size = 8065492, upload-time = "2025-05-08T19:10:05.271Z" }, - { url = "https://files.pythonhosted.org/packages/eb/43/6b80eb47d1071f234ef0c96ca370c2ca621f91c12045f1401b5c9b28a639/matplotlib-3.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ab1affc11d1f495ab9e6362b8174a25afc19c081ba5b0775ef00533a4236eea", size = 8179689, upload-time = "2025-05-08T19:10:07.602Z" }, - { url = "https://files.pythonhosted.org/packages/0f/70/d61a591958325c357204870b5e7b164f93f2a8cca1dc6ce940f563909a13/matplotlib-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2a818d8bdcafa7ed2eed74487fdb071c09c1ae24152d403952adad11fa3c65b4", size = 8050466, upload-time = "2025-05-08T19:10:09.383Z" }, - { url = "https://files.pythonhosted.org/packages/e7/75/70c9d2306203148cc7902a961240c5927dd8728afedf35e6a77e105a2985/matplotlib-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748ebc3470c253e770b17d8b0557f0aa85cf8c63fd52f1a61af5b27ec0b7ffee", size = 8456252, upload-time = "2025-05-08T19:10:11.958Z" }, - { url = "https://files.pythonhosted.org/packages/c4/91/ba0ae1ff4b3f30972ad01cd4a8029e70a0ec3b8ea5be04764b128b66f763/matplotlib-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed70453fd99733293ace1aec568255bc51c6361cb0da94fa5ebf0649fdb2150a", size = 8601321, upload-time = "2025-05-08T19:10:14.47Z" }, - { url = "https://files.pythonhosted.org/packages/d2/88/d636041eb54a84b889e11872d91f7cbf036b3b0e194a70fa064eb8b04f7a/matplotlib-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dbed9917b44070e55640bd13419de83b4c918e52d97561544814ba463811cbc7", size = 9406972, upload-time = "2025-05-08T19:10:16.569Z" }, - { url = "https://files.pythonhosted.org/packages/b1/79/0d1c165eac44405a86478082e225fce87874f7198300bbebc55faaf6d28d/matplotlib-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:cf37d8c6ef1a48829443e8ba5227b44236d7fcaf7647caa3178a4ff9f7a5be05", size = 8067954, upload-time = "2025-05-08T19:10:18.663Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c1/23cfb566a74c696a3b338d8955c549900d18fe2b898b6e94d682ca21e7c2/matplotlib-3.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9f2efccc8dcf2b86fc4ee849eea5dcaecedd0773b30f47980dc0cbeabf26ec84", size = 8180318, upload-time = "2025-05-08T19:10:20.426Z" }, - { url = "https://files.pythonhosted.org/packages/6c/0c/02f1c3b66b30da9ee343c343acbb6251bef5b01d34fad732446eaadcd108/matplotlib-3.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ddbba06a6c126e3301c3d272a99dcbe7f6c24c14024e80307ff03791a5f294e", size = 8051132, upload-time = "2025-05-08T19:10:22.569Z" }, - { url = "https://files.pythonhosted.org/packages/b4/ab/8db1a5ac9b3a7352fb914133001dae889f9fcecb3146541be46bed41339c/matplotlib-3.10.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748302b33ae9326995b238f606e9ed840bf5886ebafcb233775d946aa8107a15", size = 8457633, upload-time = "2025-05-08T19:10:24.749Z" }, - { url = "https://files.pythonhosted.org/packages/f5/64/41c4367bcaecbc03ef0d2a3ecee58a7065d0a36ae1aa817fe573a2da66d4/matplotlib-3.10.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80fcccbef63302c0efd78042ea3c2436104c5b1a4d3ae20f864593696364ac7", size = 8601031, upload-time = "2025-05-08T19:10:27.03Z" }, - { url = "https://files.pythonhosted.org/packages/12/6f/6cc79e9e5ab89d13ed64da28898e40fe5b105a9ab9c98f83abd24e46d7d7/matplotlib-3.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:55e46cbfe1f8586adb34f7587c3e4f7dedc59d5226719faf6cb54fc24f2fd52d", size = 9406988, upload-time = "2025-05-08T19:10:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/b1/0f/eed564407bd4d935ffabf561ed31099ed609e19287409a27b6d336848653/matplotlib-3.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:151d89cb8d33cb23345cd12490c76fd5d18a56581a16d950b48c6ff19bb2ab93", size = 8068034, upload-time = "2025-05-08T19:10:31.221Z" }, - { url = "https://files.pythonhosted.org/packages/3e/e5/2f14791ff69b12b09e9975e1d116d9578ac684460860ce542c2588cb7a1c/matplotlib-3.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c26dd9834e74d164d06433dc7be5d75a1e9890b926b3e57e74fa446e1a62c3e2", size = 8218223, upload-time = "2025-05-08T19:10:33.114Z" }, - { url = "https://files.pythonhosted.org/packages/5c/08/30a94afd828b6e02d0a52cae4a29d6e9ccfcf4c8b56cc28b021d3588873e/matplotlib-3.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:24853dad5b8c84c8c2390fc31ce4858b6df504156893292ce8092d190ef8151d", size = 8094985, upload-time = "2025-05-08T19:10:35.337Z" }, - { url = "https://files.pythonhosted.org/packages/89/44/f3bc6b53066c889d7a1a3ea8094c13af6a667c5ca6220ec60ecceec2dabe/matplotlib-3.10.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f7878214d369d7d4215e2a9075fef743be38fa401d32e6020bab2dfabaa566", size = 8483109, upload-time = "2025-05-08T19:10:37.611Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c7/473bc559beec08ebee9f86ca77a844b65747e1a6c2691e8c92e40b9f42a8/matplotlib-3.10.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6929fc618cb6db9cb75086f73b3219bbb25920cb24cee2ea7a12b04971a4158", size = 8618082, upload-time = "2025-05-08T19:10:39.892Z" }, - { url = "https://files.pythonhosted.org/packages/d8/e9/6ce8edd264c8819e37bbed8172e0ccdc7107fe86999b76ab5752276357a4/matplotlib-3.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:6c7818292a5cc372a2dc4c795e5c356942eb8350b98ef913f7fda51fe175ac5d", size = 9413699, upload-time = "2025-05-08T19:10:42.376Z" }, - { url = "https://files.pythonhosted.org/packages/1b/92/9a45c91089c3cf690b5badd4be81e392ff086ccca8a1d4e3a08463d8a966/matplotlib-3.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4f23ffe95c5667ef8a2b56eea9b53db7f43910fa4a2d5472ae0f72b64deab4d5", size = 8139044, upload-time = "2025-05-08T19:10:44.551Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d1/f54d43e95384b312ffa4a74a4326c722f3b8187aaaa12e9a84cdf3037131/matplotlib-3.10.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:86ab63d66bbc83fdb6733471d3bff40897c1e9921cba112accd748eee4bce5e4", size = 8162896, upload-time = "2025-05-08T19:10:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/24/a4/fbfc00c2346177c95b353dcf9b5a004106abe8730a62cb6f27e79df0a698/matplotlib-3.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a48f9c08bf7444b5d2391a83e75edb464ccda3c380384b36532a0962593a1751", size = 8039702, upload-time = "2025-05-08T19:10:49.634Z" }, - { url = "https://files.pythonhosted.org/packages/6a/b9/59e120d24a2ec5fc2d30646adb2efb4621aab3c6d83d66fb2a7a182db032/matplotlib-3.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb73d8aa75a237457988f9765e4dfe1c0d2453c5ca4eabc897d4309672c8e014", size = 8594298, upload-time = "2025-05-08T19:10:51.738Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/43/91/f2939bb60b7ebf12478b030e0d7f340247390f402b3b189616aad790c366/matplotlib-3.10.5.tar.gz", hash = "sha256:352ed6ccfb7998a00881692f38b4ca083c691d3e275b4145423704c34c909076", size = 34804044, upload-time = "2025-07-31T18:09:33.805Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/89/5355cdfe43242cb4d1a64a67cb6831398b665ad90e9702c16247cbd8d5ab/matplotlib-3.10.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5d4773a6d1c106ca05cb5a5515d277a6bb96ed09e5c8fab6b7741b8fcaa62c8f", size 
= 8229094, upload-time = "2025-07-31T18:07:36.507Z" }, + { url = "https://files.pythonhosted.org/packages/34/bc/ba802650e1c69650faed261a9df004af4c6f21759d7a1ec67fe972f093b3/matplotlib-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc88af74e7ba27de6cbe6faee916024ea35d895ed3d61ef6f58c4ce97da7185a", size = 8091464, upload-time = "2025-07-31T18:07:38.864Z" }, + { url = "https://files.pythonhosted.org/packages/ac/64/8d0c8937dee86c286625bddb1902efacc3e22f2b619f5b5a8df29fe5217b/matplotlib-3.10.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:64c4535419d5617f7363dad171a5a59963308e0f3f813c4bed6c9e6e2c131512", size = 8653163, upload-time = "2025-07-31T18:07:41.141Z" }, + { url = "https://files.pythonhosted.org/packages/11/dc/8dfc0acfbdc2fc2336c72561b7935cfa73db9ca70b875d8d3e1b3a6f371a/matplotlib-3.10.5-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a277033048ab22d34f88a3c5243938cef776493f6201a8742ed5f8b553201343", size = 9490635, upload-time = "2025-07-31T18:07:42.936Z" }, + { url = "https://files.pythonhosted.org/packages/54/02/e3fdfe0f2e9fb05f3a691d63876639dbf684170fdcf93231e973104153b4/matplotlib-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e4a6470a118a2e93022ecc7d3bd16b3114b2004ea2bf014fff875b3bc99b70c6", size = 9539036, upload-time = "2025-07-31T18:07:45.18Z" }, + { url = "https://files.pythonhosted.org/packages/c1/29/82bf486ff7f4dbedfb11ccc207d0575cbe3be6ea26f75be514252bde3d70/matplotlib-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:7e44cada61bec8833c106547786814dd4a266c1b2964fd25daa3804f1b8d4467", size = 8093529, upload-time = "2025-07-31T18:07:49.553Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c7/1f2db90a1d43710478bb1e9b57b162852f79234d28e4f48a28cc415aa583/matplotlib-3.10.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:dcfc39c452c6a9f9028d3e44d2d721484f665304857188124b505b2c95e1eecf", size = 8239216, upload-time = "2025-07-31T18:07:51.947Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/6d/ca6844c77a4f89b1c9e4d481c412e1d1dbabf2aae2cbc5aa2da4a1d6683e/matplotlib-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:903352681b59f3efbf4546985142a9686ea1d616bb054b09a537a06e4b892ccf", size = 8102130, upload-time = "2025-07-31T18:07:53.65Z" }, + { url = "https://files.pythonhosted.org/packages/1d/1e/5e187a30cc673a3e384f3723e5f3c416033c1d8d5da414f82e4e731128ea/matplotlib-3.10.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:080c3676a56b8ee1c762bcf8fca3fe709daa1ee23e6ef06ad9f3fc17332f2d2a", size = 8666471, upload-time = "2025-07-31T18:07:55.304Z" }, + { url = "https://files.pythonhosted.org/packages/03/c0/95540d584d7d645324db99a845ac194e915ef75011a0d5e19e1b5cee7e69/matplotlib-3.10.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b4984d5064a35b6f66d2c11d668565f4389b1119cc64db7a4c1725bc11adffc", size = 9500518, upload-time = "2025-07-31T18:07:57.199Z" }, + { url = "https://files.pythonhosted.org/packages/ba/2e/e019352099ea58b4169adb9c6e1a2ad0c568c6377c2b677ee1f06de2adc7/matplotlib-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3967424121d3a46705c9fa9bdb0931de3228f13f73d7bb03c999c88343a89d89", size = 9552372, upload-time = "2025-07-31T18:07:59.41Z" }, + { url = "https://files.pythonhosted.org/packages/b7/81/3200b792a5e8b354f31f4101ad7834743ad07b6d620259f2059317b25e4d/matplotlib-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:33775bbeb75528555a15ac29396940128ef5613cf9a2d31fb1bfd18b3c0c0903", size = 8100634, upload-time = "2025-07-31T18:08:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/52/46/a944f6f0c1f5476a0adfa501969d229ce5ae60cf9a663be0e70361381f89/matplotlib-3.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:c61333a8e5e6240e73769d5826b9a31d8b22df76c0778f8480baf1b4b01c9420", size = 7978880, upload-time = "2025-07-31T18:08:03.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/1e/c6f6bcd882d589410b475ca1fc22e34e34c82adff519caf18f3e6dd9d682/matplotlib-3.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:00b6feadc28a08bd3c65b2894f56cf3c94fc8f7adcbc6ab4516ae1e8ed8f62e2", size = 8253056, upload-time = "2025-07-31T18:08:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/53/e6/d6f7d1b59413f233793dda14419776f5f443bcccb2dfc84b09f09fe05dbe/matplotlib-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee98a5c5344dc7f48dc261b6ba5d9900c008fc12beb3fa6ebda81273602cc389", size = 8110131, upload-time = "2025-07-31T18:08:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/66/2b/bed8a45e74957549197a2ac2e1259671cd80b55ed9e1fe2b5c94d88a9202/matplotlib-3.10.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a17e57e33de901d221a07af32c08870ed4528db0b6059dce7d7e65c1122d4bea", size = 8669603, upload-time = "2025-07-31T18:08:09.064Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a7/315e9435b10d057f5e52dfc603cd353167ae28bb1a4e033d41540c0067a4/matplotlib-3.10.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97b9d6443419085950ee4a5b1ee08c363e5c43d7176e55513479e53669e88468", size = 9508127, upload-time = "2025-07-31T18:08:10.845Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d9/edcbb1f02ca99165365d2768d517898c22c6040187e2ae2ce7294437c413/matplotlib-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ceefe5d40807d29a66ae916c6a3915d60ef9f028ce1927b84e727be91d884369", size = 9566926, upload-time = "2025-07-31T18:08:13.186Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d9/6dd924ad5616c97b7308e6320cf392c466237a82a2040381163b7500510a/matplotlib-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:c04cba0f93d40e45b3c187c6c52c17f24535b27d545f757a2fffebc06c12b98b", size = 8107599, upload-time = "2025-07-31T18:08:15.116Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/f3/522dc319a50f7b0279fbe74f86f7a3506ce414bc23172098e8d2bdf21894/matplotlib-3.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:a41bcb6e2c8e79dc99c5511ae6f7787d2fb52efd3d805fff06d5d4f667db16b2", size = 7978173, upload-time = "2025-07-31T18:08:21.518Z" }, + { url = "https://files.pythonhosted.org/packages/8d/05/4f3c1f396075f108515e45cb8d334aff011a922350e502a7472e24c52d77/matplotlib-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:354204db3f7d5caaa10e5de74549ef6a05a4550fdd1c8f831ab9bca81efd39ed", size = 8253586, upload-time = "2025-07-31T18:08:23.107Z" }, + { url = "https://files.pythonhosted.org/packages/2f/2c/e084415775aac7016c3719fe7006cdb462582c6c99ac142f27303c56e243/matplotlib-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b072aac0c3ad563a2b3318124756cb6112157017f7431626600ecbe890df57a1", size = 8110715, upload-time = "2025-07-31T18:08:24.675Z" }, + { url = "https://files.pythonhosted.org/packages/52/1b/233e3094b749df16e3e6cd5a44849fd33852e692ad009cf7de00cf58ddf6/matplotlib-3.10.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d52fd5b684d541b5a51fb276b2b97b010c75bee9aa392f96b4a07aeb491e33c7", size = 8669397, upload-time = "2025-07-31T18:08:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ec/03f9e003a798f907d9f772eed9b7c6a9775d5bd00648b643ebfb88e25414/matplotlib-3.10.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee7a09ae2f4676276f5a65bd9f2bd91b4f9fbaedf49f40267ce3f9b448de501f", size = 9508646, upload-time = "2025-07-31T18:08:28.848Z" }, + { url = "https://files.pythonhosted.org/packages/91/e7/c051a7a386680c28487bca27d23b02d84f63e3d2a9b4d2fc478e6a42e37e/matplotlib-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ba6c3c9c067b83481d647af88b4e441d532acdb5ef22178a14935b0b881188f4", size = 9567424, upload-time = "2025-07-31T18:08:30.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/c2/24302e93ff431b8f4173ee1dd88976c8d80483cadbc5d3d777cef47b3a1c/matplotlib-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:07442d2692c9bd1cceaa4afb4bbe5b57b98a7599de4dabfcca92d3eea70f9ebe", size = 8107809, upload-time = "2025-07-31T18:08:33.928Z" }, + { url = "https://files.pythonhosted.org/packages/0b/33/423ec6a668d375dad825197557ed8fbdb74d62b432c1ed8235465945475f/matplotlib-3.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:48fe6d47380b68a37ccfcc94f009530e84d41f71f5dae7eda7c4a5a84aa0a674", size = 7978078, upload-time = "2025-07-31T18:08:36.764Z" }, + { url = "https://files.pythonhosted.org/packages/51/17/521fc16ec766455c7bb52cc046550cf7652f6765ca8650ff120aa2d197b6/matplotlib-3.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b80eb8621331449fc519541a7461987f10afa4f9cfd91afcd2276ebe19bd56c", size = 8295590, upload-time = "2025-07-31T18:08:38.521Z" }, + { url = "https://files.pythonhosted.org/packages/f8/12/23c28b2c21114c63999bae129fce7fd34515641c517ae48ce7b7dcd33458/matplotlib-3.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47a388908e469d6ca2a6015858fa924e0e8a2345a37125948d8e93a91c47933e", size = 8158518, upload-time = "2025-07-31T18:08:40.195Z" }, + { url = "https://files.pythonhosted.org/packages/81/f8/aae4eb25e8e7190759f3cb91cbeaa344128159ac92bb6b409e24f8711f78/matplotlib-3.10.5-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8b6b49167d208358983ce26e43aa4196073b4702858670f2eb111f9a10652b4b", size = 8691815, upload-time = "2025-07-31T18:08:42.238Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ba/450c39ebdd486bd33a359fc17365ade46c6a96bf637bbb0df7824de2886c/matplotlib-3.10.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a8da0453a7fd8e3da114234ba70c5ba9ef0e98f190309ddfde0f089accd46ea", size = 9522814, upload-time = "2025-07-31T18:08:44.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/11/9c66f6a990e27bb9aa023f7988d2d5809cb98aa39c09cbf20fba75a542ef/matplotlib-3.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52c6573dfcb7726a9907b482cd5b92e6b5499b284ffacb04ffbfe06b3e568124", size = 9573917, upload-time = "2025-07-31T18:08:47.038Z" }, + { url = "https://files.pythonhosted.org/packages/b3/69/8b49394de92569419e5e05e82e83df9b749a0ff550d07631ea96ed2eb35a/matplotlib-3.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:a23193db2e9d64ece69cac0c8231849db7dd77ce59c7b89948cf9d0ce655a3ce", size = 8181034, upload-time = "2025-07-31T18:08:48.943Z" }, + { url = "https://files.pythonhosted.org/packages/47/23/82dc435bb98a2fc5c20dffcac8f0b083935ac28286413ed8835df40d0baa/matplotlib-3.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:56da3b102cf6da2776fef3e71cd96fcf22103a13594a18ac9a9b31314e0be154", size = 8023337, upload-time = "2025-07-31T18:08:50.791Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/26b6cfde31f5383503ee45dcb7e691d45dadf0b3f54639332b59316a97f8/matplotlib-3.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:96ef8f5a3696f20f55597ffa91c28e2e73088df25c555f8d4754931515512715", size = 8253591, upload-time = "2025-07-31T18:08:53.254Z" }, + { url = "https://files.pythonhosted.org/packages/c1/89/98488c7ef7ea20ea659af7499628c240a608b337af4be2066d644cfd0a0f/matplotlib-3.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:77fab633e94b9da60512d4fa0213daeb76d5a7b05156840c4fd0399b4b818837", size = 8112566, upload-time = "2025-07-31T18:08:55.116Z" }, + { url = "https://files.pythonhosted.org/packages/52/67/42294dfedc82aea55e1a767daf3263aacfb5a125f44ba189e685bab41b6f/matplotlib-3.10.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27f52634315e96b1debbfdc5c416592edcd9c4221bc2f520fd39c33db5d9f202", size = 9513281, upload-time = "2025-07-31T18:08:56.885Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/68/f258239e0cf34c2cbc816781c7ab6fca768452e6bf1119aedd2bd4a882a3/matplotlib-3.10.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:525f6e28c485c769d1f07935b660c864de41c37fd716bfa64158ea646f7084bb", size = 9780873, upload-time = "2025-07-31T18:08:59.241Z" }, + { url = "https://files.pythonhosted.org/packages/89/64/f4881554006bd12e4558bd66778bdd15d47b00a1f6c6e8b50f6208eda4b3/matplotlib-3.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f5f3ec4c191253c5f2b7c07096a142c6a1c024d9f738247bfc8e3f9643fc975", size = 9568954, upload-time = "2025-07-31T18:09:01.244Z" }, + { url = "https://files.pythonhosted.org/packages/06/f8/42779d39c3f757e1f012f2dda3319a89fb602bd2ef98ce8faf0281f4febd/matplotlib-3.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:707f9c292c4cd4716f19ab8a1f93f26598222cd931e0cd98fbbb1c5994bf7667", size = 8237465, upload-time = "2025-07-31T18:09:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/cf/f8/153fd06b5160f0cd27c8b9dd797fcc9fb56ac6a0ebf3c1f765b6b68d3c8a/matplotlib-3.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:21a95b9bf408178d372814de7baacd61c712a62cae560b5e6f35d791776f6516", size = 8108898, upload-time = "2025-07-31T18:09:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/c4b082a382a225fe0d2a73f1f57cf6f6f132308805b493a54c8641006238/matplotlib-3.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a6b310f95e1102a8c7c817ef17b60ee5d1851b8c71b63d9286b66b177963039e", size = 8295636, upload-time = "2025-07-31T18:09:07.306Z" }, + { url = "https://files.pythonhosted.org/packages/30/73/2195fa2099718b21a20da82dfc753bf2af58d596b51aefe93e359dd5915a/matplotlib-3.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:94986a242747a0605cb3ff1cb98691c736f28a59f8ffe5175acaeb7397c49a5a", size = 8158575, upload-time = "2025-07-31T18:09:09.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/e9/a08cdb34618a91fa08f75e6738541da5cacde7c307cea18ff10f0d03fcff/matplotlib-3.10.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ff10ea43288f0c8bab608a305dc6c918cc729d429c31dcbbecde3b9f4d5b569", size = 9522815, upload-time = "2025-07-31T18:09:11.191Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/34d8b7e0d1bb6d06ef45db01dfa560d5a67b1c40c0b998ce9ccde934bb09/matplotlib-3.10.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6adb644c9d040ffb0d3434e440490a66cf73dbfa118a6f79cd7568431f7a012", size = 9783514, upload-time = "2025-07-31T18:09:13.307Z" }, + { url = "https://files.pythonhosted.org/packages/12/09/d330d1e55dcca2e11b4d304cc5227f52e2512e46828d6249b88e0694176e/matplotlib-3.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4fa40a8f98428f789a9dcacd625f59b7bc4e3ef6c8c7c80187a7a709475cf592", size = 9573932, upload-time = "2025-07-31T18:09:15.335Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3b/f70258ac729aa004aca673800a53a2b0a26d49ca1df2eaa03289a1c40f81/matplotlib-3.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:95672a5d628b44207aab91ec20bf59c26da99de12b88f7e0b1fb0a84a86ff959", size = 8322003, upload-time = "2025-07-31T18:09:17.416Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/3601f8ce6d76a7c81c7f25a0e15fde0d6b66226dd187aa6d2838e6374161/matplotlib-3.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:2efaf97d72629e74252e0b5e3c46813e9eeaa94e011ecf8084a971a31a97f40b", size = 8153849, upload-time = "2025-07-31T18:09:19.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/eb/7d4c5de49eb78294e1a8e2be8a6ecff8b433e921b731412a56cd1abd3567/matplotlib-3.10.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b5fa2e941f77eb579005fb804026f9d0a1082276118d01cc6051d0d9626eaa7f", size = 8222360, upload-time = "2025-07-31T18:09:21.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/8a/e435db90927b66b16d69f8f009498775f4469f8de4d14b87856965e58eba/matplotlib-3.10.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1fc0d2a3241cdcb9daaca279204a3351ce9df3c0e7e621c7e04ec28aaacaca30", size = 8087462, upload-time = "2025-07-31T18:09:23.504Z" }, + { url = "https://files.pythonhosted.org/packages/0b/dd/06c0e00064362f5647f318e00b435be2ff76a1bdced97c5eaf8347311fbe/matplotlib-3.10.5-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8dee65cb1424b7dc982fe87895b5613d4e691cc57117e8af840da0148ca6c1d7", size = 8659802, upload-time = "2025-07-31T18:09:25.256Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d6/e921be4e1a5f7aca5194e1f016cb67ec294548e530013251f630713e456d/matplotlib-3.10.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:160e125da27a749481eaddc0627962990f6029811dbeae23881833a011a0907f", size = 8233224, upload-time = "2025-07-31T18:09:27.512Z" }, + { url = "https://files.pythonhosted.org/packages/ec/74/a2b9b04824b9c349c8f1b2d21d5af43fa7010039427f2b133a034cb09e59/matplotlib-3.10.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac3d50760394d78a3c9be6b28318fe22b494c4fcf6407e8fd4794b538251899b", size = 8098539, upload-time = "2025-07-31T18:09:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/fc/66/cd29ebc7f6c0d2a15d216fb572573e8fc38bd5d6dec3bd9d7d904c0949f7/matplotlib-3.10.5-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6c49465bf689c4d59d174d0c7795fb42a21d4244d11d70e52b8011987367ac61", size = 8672192, upload-time = "2025-07-31T18:09:31.407Z" }, ] [[package]] @@ -1416,7 +1578,7 @@ wheels = [ [[package]] name = "mypy" -version = "1.16.1" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, @@ -1424,39 +1586,45 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist 
= { url = "https://files.pythonhosted.org/packages/81/69/92c7fa98112e4d9eb075a239caa4ef4649ad7d441545ccffbd5e34607cbb/mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab", size = 3324747, upload-time = "2025-06-16T16:51:35.145Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/12/2bf23a80fcef5edb75de9a1e295d778e0f46ea89eb8b115818b663eff42b/mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a", size = 10958644, upload-time = "2025-06-16T16:51:11.649Z" }, - { url = "https://files.pythonhosted.org/packages/08/50/bfe47b3b278eacf348291742fd5e6613bbc4b3434b72ce9361896417cfe5/mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72", size = 10087033, upload-time = "2025-06-16T16:35:30.089Z" }, - { url = "https://files.pythonhosted.org/packages/21/de/40307c12fe25675a0776aaa2cdd2879cf30d99eec91b898de00228dc3ab5/mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea", size = 11875645, upload-time = "2025-06-16T16:35:48.49Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d8/85bdb59e4a98b7a31495bd8f1a4445d8ffc86cde4ab1f8c11d247c11aedc/mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574", size = 12616986, upload-time = "2025-06-16T16:48:39.526Z" }, - { url = "https://files.pythonhosted.org/packages/0e/d0/bb25731158fa8f8ee9e068d3e94fcceb4971fedf1424248496292512afe9/mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d", size = 12878632, upload-time = "2025-06-16T16:36:08.195Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/11/822a9beb7a2b825c0cb06132ca0a5183f8327a5e23ef89717c9474ba0bc6/mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6", size = 9484391, upload-time = "2025-06-16T16:37:56.151Z" }, - { url = "https://files.pythonhosted.org/packages/9a/61/ec1245aa1c325cb7a6c0f8570a2eee3bfc40fa90d19b1267f8e50b5c8645/mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc", size = 10890557, upload-time = "2025-06-16T16:37:21.421Z" }, - { url = "https://files.pythonhosted.org/packages/6b/bb/6eccc0ba0aa0c7a87df24e73f0ad34170514abd8162eb0c75fd7128171fb/mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782", size = 10012921, upload-time = "2025-06-16T16:51:28.659Z" }, - { url = "https://files.pythonhosted.org/packages/5f/80/b337a12e2006715f99f529e732c5f6a8c143bb58c92bb142d5ab380963a5/mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507", size = 11802887, upload-time = "2025-06-16T16:50:53.627Z" }, - { url = "https://files.pythonhosted.org/packages/d9/59/f7af072d09793d581a745a25737c7c0a945760036b16aeb620f658a017af/mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca", size = 12531658, upload-time = "2025-06-16T16:33:55.002Z" }, - { url = "https://files.pythonhosted.org/packages/82/c4/607672f2d6c0254b94a646cfc45ad589dd71b04aa1f3d642b840f7cce06c/mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4", size = 12732486, upload-time = "2025-06-16T16:37:03.301Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/5e/136555ec1d80df877a707cebf9081bd3a9f397dedc1ab9750518d87489ec/mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6", size = 9479482, upload-time = "2025-06-16T16:47:37.48Z" }, - { url = "https://files.pythonhosted.org/packages/b4/d6/39482e5fcc724c15bf6280ff5806548c7185e0c090712a3736ed4d07e8b7/mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d", size = 11066493, upload-time = "2025-06-16T16:47:01.683Z" }, - { url = "https://files.pythonhosted.org/packages/e6/e5/26c347890efc6b757f4d5bb83f4a0cf5958b8cf49c938ac99b8b72b420a6/mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9", size = 10081687, upload-time = "2025-06-16T16:48:19.367Z" }, - { url = "https://files.pythonhosted.org/packages/44/c7/b5cb264c97b86914487d6a24bd8688c0172e37ec0f43e93b9691cae9468b/mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79", size = 11839723, upload-time = "2025-06-16T16:49:20.912Z" }, - { url = "https://files.pythonhosted.org/packages/15/f8/491997a9b8a554204f834ed4816bda813aefda31cf873bb099deee3c9a99/mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15", size = 12722980, upload-time = "2025-06-16T16:37:40.929Z" }, - { url = "https://files.pythonhosted.org/packages/df/f0/2bd41e174b5fd93bc9de9a28e4fb673113633b8a7f3a607fa4a73595e468/mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd", size = 12903328, upload-time = "2025-06-16T16:34:35.099Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/81/5572108a7bec2c46b8aff7e9b524f371fe6ab5efb534d38d6b37b5490da8/mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b", size = 9562321, upload-time = "2025-06-16T16:48:58.823Z" }, - { url = "https://files.pythonhosted.org/packages/28/e3/96964af4a75a949e67df4b95318fe2b7427ac8189bbc3ef28f92a1c5bc56/mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438", size = 11063480, upload-time = "2025-06-16T16:47:56.205Z" }, - { url = "https://files.pythonhosted.org/packages/f5/4d/cd1a42b8e5be278fab7010fb289d9307a63e07153f0ae1510a3d7b703193/mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536", size = 10090538, upload-time = "2025-06-16T16:46:43.92Z" }, - { url = "https://files.pythonhosted.org/packages/c9/4f/c3c6b4b66374b5f68bab07c8cabd63a049ff69796b844bc759a0ca99bb2a/mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f", size = 11836839, upload-time = "2025-06-16T16:36:28.039Z" }, - { url = "https://files.pythonhosted.org/packages/b4/7e/81ca3b074021ad9775e5cb97ebe0089c0f13684b066a750b7dc208438403/mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359", size = 12715634, upload-time = "2025-06-16T16:50:34.441Z" }, - { url = "https://files.pythonhosted.org/packages/e9/95/bdd40c8be346fa4c70edb4081d727a54d0a05382d84966869738cfa8a497/mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be", size = 12895584, upload-time = "2025-06-16T16:34:54.857Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/fd/d486a0827a1c597b3b48b1bdef47228a6e9ee8102ab8c28f944cb83b65dc/mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee", size = 9573886, upload-time = "2025-06-16T16:36:43.589Z" }, - { url = "https://files.pythonhosted.org/packages/49/5e/ed1e6a7344005df11dfd58b0fdd59ce939a0ba9f7ed37754bf20670b74db/mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069", size = 10959511, upload-time = "2025-06-16T16:47:21.945Z" }, - { url = "https://files.pythonhosted.org/packages/30/88/a7cbc2541e91fe04f43d9e4577264b260fecedb9bccb64ffb1a34b7e6c22/mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da", size = 10075555, upload-time = "2025-06-16T16:50:14.084Z" }, - { url = "https://files.pythonhosted.org/packages/93/f7/c62b1e31a32fbd1546cca5e0a2e5f181be5761265ad1f2e94f2a306fa906/mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c", size = 11874169, upload-time = "2025-06-16T16:49:42.276Z" }, - { url = "https://files.pythonhosted.org/packages/c8/15/db580a28034657fb6cb87af2f8996435a5b19d429ea4dcd6e1c73d418e60/mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383", size = 12610060, upload-time = "2025-06-16T16:34:15.215Z" }, - { url = "https://files.pythonhosted.org/packages/ec/78/c17f48f6843048fa92d1489d3095e99324f2a8c420f831a04ccc454e2e51/mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40", size = 12875199, upload-time = "2025-06-16T16:35:14.448Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/d6/ed42167d0a42680381653fd251d877382351e1bd2c6dd8a818764be3beb1/mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b", size = 9487033, upload-time = "2025-06-16T16:49:57.907Z" }, - { url = "https://files.pythonhosted.org/packages/cf/d3/53e684e78e07c1a2bf7105715e5edd09ce951fc3f47cf9ed095ec1b7a037/mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37", size = 2265923, upload-time = "2025-06-16T16:48:02.366Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/a9/3d7aa83955617cdf02f94e50aab5c830d205cfa4320cf124ff64acce3a8e/mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972", size = 11003299, upload-time = "2025-07-31T07:54:06.425Z" }, + { url = "https://files.pythonhosted.org/packages/83/e8/72e62ff837dd5caaac2b4a5c07ce769c8e808a00a65e5d8f94ea9c6f20ab/mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7", size = 10125451, upload-time = "2025-07-31T07:53:52.974Z" }, + { url = "https://files.pythonhosted.org/packages/7d/10/f3f3543f6448db11881776f26a0ed079865926b0c841818ee22de2c6bbab/mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df", size = 11916211, upload-time = "2025-07-31T07:53:18.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/bf/63e83ed551282d67bb3f7fea2cd5561b08d2bb6eb287c096539feb5ddbc5/mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390", size = 12652687, upload-time = "2025-07-31T07:53:30.544Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/68f2eeef11facf597143e85b694a161868b3b006a5fbad50e09ea117ef24/mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94", size = 12896322, upload-time = "2025-07-31T07:53:50.74Z" }, + { url = "https://files.pythonhosted.org/packages/a3/87/8e3e9c2c8bd0d7e071a89c71be28ad088aaecbadf0454f46a540bda7bca6/mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b", size = 9507962, upload-time = "2025-07-31T07:53:08.431Z" }, + { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" }, + { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" }, + { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" }, + { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" }, + { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" }, + { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" }, + { url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 12744296, upload-time = "2025-07-31T07:53:03.896Z" }, + { url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" }, + { url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/79c2fac86da57c7d8c48622a05873eaab40b905096c33597462713f5af90/mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733", size = 11040037, upload-time = "2025-07-31T07:54:10.942Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c3/adabe6ff53638e3cad19e3547268482408323b1e68bf082c9119000cd049/mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd", size = 10131550, upload-time = "2025-07-31T07:53:41.307Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c5/2e234c22c3bdeb23a7817af57a58865a39753bde52c74e2c661ee0cfc640/mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0", size = 11872963, upload-time = "2025-07-31T07:53:16.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/26/c13c130f35ca8caa5f2ceab68a247775648fdcd6c9a18f158825f2bc2410/mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a", size = 12710189, upload-time = "2025-07-31T07:54:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/82/df/c7d79d09f6de8383fe800521d066d877e54d30b4fb94281c262be2df84ef/mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91", size = 12900322, upload-time = "2025-07-31T07:53:10.551Z" }, + { url = "https://files.pythonhosted.org/packages/b8/98/3d5a48978b4f708c55ae832619addc66d677f6dc59f3ebad71bae8285ca6/mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed", size = 9751879, upload-time = "2025-07-31T07:52:56.683Z" }, + { url = "https://files.pythonhosted.org/packages/29/cb/673e3d34e5d8de60b3a61f44f80150a738bff568cd6b7efb55742a605e98/mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9", size = 10992466, upload-time = "2025-07-31T07:53:57.574Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d0/fe1895836eea3a33ab801561987a10569df92f2d3d4715abf2cfeaa29cb2/mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99", size = 10117638, upload-time = "2025-07-31T07:53:34.256Z" }, + { url = "https://files.pythonhosted.org/packages/97/f3/514aa5532303aafb95b9ca400a31054a2bd9489de166558c2baaeea9c522/mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8", size = 11915673, upload-time = "2025-07-31T07:52:59.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/c3/c0805f0edec96fe8e2c048b03769a6291523d509be8ee7f56ae922fa3882/mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8", size = 12649022, upload-time = "2025-07-31T07:53:45.92Z" }, + { url = "https://files.pythonhosted.org/packages/45/3e/d646b5a298ada21a8512fa7e5531f664535a495efa672601702398cea2b4/mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259", size = 12895536, upload-time = "2025-07-31T07:53:06.17Z" }, + { url = "https://files.pythonhosted.org/packages/14/55/e13d0dcd276975927d1f4e9e2ec4fd409e199f01bdc671717e673cc63a22/mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d", size = 9512564, upload-time = "2025-07-31T07:53:12.346Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, ] [[package]] @@ -1608,64 +1776,87 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.0" +version = "2.3.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", "python_full_version == '3.11.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/db/8e12381333aea300890829a0a36bfa738cac95475d88982d538725143fd9/numpy-2.3.0.tar.gz", hash = "sha256:581f87f9e9e9db2cba2141400e160e9dd644ee248788d6f90636eeb8fd9260a6", size = 20382813, upload-time = "2025-06-07T14:54:32.608Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/5f/df67435257d827eb3b8af66f585223dc2c3f2eb7ad0b50cb1dae2f35f494/numpy-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c3c9fdde0fa18afa1099d6257eb82890ea4f3102847e692193b54e00312a9ae9", size = 21199688, upload-time = "2025-06-07T14:36:52.067Z" }, - { url = "https://files.pythonhosted.org/packages/e5/ce/aad219575055d6c9ef29c8c540c81e1c38815d3be1fe09cdbe53d48ee838/numpy-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46d16f72c2192da7b83984aa5455baee640e33a9f1e61e656f29adf55e406c2b", size = 14359277, upload-time = "2025-06-07T14:37:15.325Z" }, - { url = "https://files.pythonhosted.org/packages/29/6b/2d31da8e6d2ec99bed54c185337a87f8fbeccc1cd9804e38217e92f3f5e2/numpy-2.3.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a0be278be9307c4ab06b788f2a077f05e180aea817b3e41cebbd5aaf7bd85ed3", size = 5376069, upload-time = "2025-06-07T14:37:25.636Z" }, - { url = "https://files.pythonhosted.org/packages/7d/2a/6c59a062397553ec7045c53d5fcdad44e4536e54972faa2ba44153bca984/numpy-2.3.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:99224862d1412d2562248d4710126355d3a8db7672170a39d6909ac47687a8a4", size = 6913057, upload-time = "2025-06-07T14:37:37.215Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5a/8df16f258d28d033e4f359e29d3aeb54663243ac7b71504e89deeb813202/numpy-2.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2393a914db64b0ead0ab80c962e42d09d5f385802006a6c87835acb1f58adb96", size = 14568083, upload-time = "2025-06-07T14:37:59.337Z" }, - { url = "https://files.pythonhosted.org/packages/0a/92/0528a563dfc2cdccdcb208c0e241a4bb500d7cde218651ffb834e8febc50/numpy-2.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7729c8008d55e80784bd113787ce876ca117185c579c0d626f59b87d433ea779", size = 16929402, upload-time = "2025-06-07T14:38:24.343Z" }, - { url = "https://files.pythonhosted.org/packages/e4/2f/e7a8c8d4a2212c527568d84f31587012cf5497a7271ea1f23332142f634e/numpy-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d4fb37a8d383b769281714897420c5cc3545c79dc427df57fc9b852ee0bf58", size = 15879193, upload-time = "2025-06-07T14:38:48.007Z" 
}, - { url = "https://files.pythonhosted.org/packages/e2/c3/dada3f005953847fe35f42ac0fe746f6e1ea90b4c6775e4be605dcd7b578/numpy-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c39ec392b5db5088259c68250e342612db82dc80ce044cf16496cf14cf6bc6f8", size = 18665318, upload-time = "2025-06-07T14:39:15.794Z" }, - { url = "https://files.pythonhosted.org/packages/3b/ae/3f448517dedefc8dd64d803f9d51a8904a48df730e00a3c5fb1e75a60620/numpy-2.3.0-cp311-cp311-win32.whl", hash = "sha256:ee9d3ee70d62827bc91f3ea5eee33153212c41f639918550ac0475e3588da59f", size = 6601108, upload-time = "2025-06-07T14:39:27.176Z" }, - { url = "https://files.pythonhosted.org/packages/8c/4a/556406d2bb2b9874c8cbc840c962683ac28f21efbc9b01177d78f0199ca1/numpy-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:43c55b6a860b0eb44d42341438b03513cf3879cb3617afb749ad49307e164edd", size = 13021525, upload-time = "2025-06-07T14:39:46.637Z" }, - { url = "https://files.pythonhosted.org/packages/ed/ee/bf54278aef30335ffa9a189f869ea09e1a195b3f4b93062164a3b02678a7/numpy-2.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:2e6a1409eee0cb0316cb64640a49a49ca44deb1a537e6b1121dc7c458a1299a8", size = 10170327, upload-time = "2025-06-07T14:40:02.703Z" }, - { url = "https://files.pythonhosted.org/packages/89/59/9df493df81ac6f76e9f05cdbe013cdb0c9a37b434f6e594f5bd25e278908/numpy-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:389b85335838155a9076e9ad7f8fdba0827496ec2d2dc32ce69ce7898bde03ba", size = 20897025, upload-time = "2025-06-07T14:40:33.558Z" }, - { url = "https://files.pythonhosted.org/packages/2f/86/4ff04335901d6cf3a6bb9c748b0097546ae5af35e455ae9b962ebff4ecd7/numpy-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9498f60cd6bb8238d8eaf468a3d5bb031d34cd12556af53510f05fcf581c1b7e", size = 14129882, upload-time = "2025-06-07T14:40:55.034Z" }, - { url = "https://files.pythonhosted.org/packages/71/8d/a942cd4f959de7f08a79ab0c7e6cecb7431d5403dce78959a726f0f57aa1/numpy-2.3.0-cp312-cp312-macosx_14_0_arm64.whl", 
hash = "sha256:622a65d40d8eb427d8e722fd410ac3ad4958002f109230bc714fa551044ebae2", size = 5110181, upload-time = "2025-06-07T14:41:04.4Z" }, - { url = "https://files.pythonhosted.org/packages/86/5d/45850982efc7b2c839c5626fb67fbbc520d5b0d7c1ba1ae3651f2f74c296/numpy-2.3.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b9446d9d8505aadadb686d51d838f2b6688c9e85636a0c3abaeb55ed54756459", size = 6647581, upload-time = "2025-06-07T14:41:14.695Z" }, - { url = "https://files.pythonhosted.org/packages/1a/c0/c871d4a83f93b00373d3eebe4b01525eee8ef10b623a335ec262b58f4dc1/numpy-2.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:50080245365d75137a2bf46151e975de63146ae6d79f7e6bd5c0e85c9931d06a", size = 14262317, upload-time = "2025-06-07T14:41:35.862Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f6/bc47f5fa666d5ff4145254f9e618d56e6a4ef9b874654ca74c19113bb538/numpy-2.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c24bb4113c66936eeaa0dc1e47c74770453d34f46ee07ae4efd853a2ed1ad10a", size = 16633919, upload-time = "2025-06-07T14:42:00.622Z" }, - { url = "https://files.pythonhosted.org/packages/f5/b4/65f48009ca0c9b76df5f404fccdea5a985a1bb2e34e97f21a17d9ad1a4ba/numpy-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4d8d294287fdf685281e671886c6dcdf0291a7c19db3e5cb4178d07ccf6ecc67", size = 15567651, upload-time = "2025-06-07T14:42:24.429Z" }, - { url = "https://files.pythonhosted.org/packages/f1/62/5367855a2018578e9334ed08252ef67cc302e53edc869666f71641cad40b/numpy-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6295f81f093b7f5769d1728a6bd8bf7466de2adfa771ede944ce6711382b89dc", size = 18361723, upload-time = "2025-06-07T14:42:51.167Z" }, - { url = "https://files.pythonhosted.org/packages/d4/75/5baed8cd867eabee8aad1e74d7197d73971d6a3d40c821f1848b8fab8b84/numpy-2.3.0-cp312-cp312-win32.whl", hash = "sha256:e6648078bdd974ef5d15cecc31b0c410e2e24178a6e10bf511e0557eed0f2570", size = 6318285, upload-time = "2025-06-07T14:43:02.052Z" }, - 
{ url = "https://files.pythonhosted.org/packages/bc/49/d5781eaa1a15acb3b3a3f49dc9e2ff18d92d0ce5c2976f4ab5c0a7360250/numpy-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:0898c67a58cdaaf29994bc0e2c65230fd4de0ac40afaf1584ed0b02cd74c6fdd", size = 12732594, upload-time = "2025-06-07T14:43:21.071Z" }, - { url = "https://files.pythonhosted.org/packages/c2/1c/6d343e030815c7c97a1f9fbad00211b47717c7fe446834c224bd5311e6f1/numpy-2.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:bd8df082b6c4695753ad6193018c05aac465d634834dca47a3ae06d4bb22d9ea", size = 9891498, upload-time = "2025-06-07T14:43:36.332Z" }, - { url = "https://files.pythonhosted.org/packages/73/fc/1d67f751fd4dbafc5780244fe699bc4084268bad44b7c5deb0492473127b/numpy-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5754ab5595bfa2c2387d241296e0381c21f44a4b90a776c3c1d39eede13a746a", size = 20889633, upload-time = "2025-06-07T14:44:06.839Z" }, - { url = "https://files.pythonhosted.org/packages/e8/95/73ffdb69e5c3f19ec4530f8924c4386e7ba097efc94b9c0aff607178ad94/numpy-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d11fa02f77752d8099573d64e5fe33de3229b6632036ec08f7080f46b6649959", size = 14151683, upload-time = "2025-06-07T14:44:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/64/d5/06d4bb31bb65a1d9c419eb5676173a2f90fd8da3c59f816cc54c640ce265/numpy-2.3.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:aba48d17e87688a765ab1cd557882052f238e2f36545dfa8e29e6a91aef77afe", size = 5102683, upload-time = "2025-06-07T14:44:38.417Z" }, - { url = "https://files.pythonhosted.org/packages/12/8b/6c2cef44f8ccdc231f6b56013dff1d71138c48124334aded36b1a1b30c5a/numpy-2.3.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4dc58865623023b63b10d52f18abaac3729346a7a46a778381e0e3af4b7f3beb", size = 6640253, upload-time = "2025-06-07T14:44:49.359Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/aa/fca4bf8de3396ddb59544df9b75ffe5b73096174de97a9492d426f5cd4aa/numpy-2.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:df470d376f54e052c76517393fa443758fefcdd634645bc9c1f84eafc67087f0", size = 14258658, upload-time = "2025-06-07T14:45:10.156Z" }, - { url = "https://files.pythonhosted.org/packages/1c/12/734dce1087eed1875f2297f687e671cfe53a091b6f2f55f0c7241aad041b/numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:87717eb24d4a8a64683b7a4e91ace04e2f5c7c77872f823f02a94feee186168f", size = 16628765, upload-time = "2025-06-07T14:45:35.076Z" }, - { url = "https://files.pythonhosted.org/packages/48/03/ffa41ade0e825cbcd5606a5669962419528212a16082763fc051a7247d76/numpy-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fa264d56882b59dcb5ea4d6ab6f31d0c58a57b41aec605848b6eb2ef4a43e8", size = 15564335, upload-time = "2025-06-07T14:45:58.797Z" }, - { url = "https://files.pythonhosted.org/packages/07/58/869398a11863310aee0ff85a3e13b4c12f20d032b90c4b3ee93c3b728393/numpy-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e651756066a0eaf900916497e20e02fe1ae544187cb0fe88de981671ee7f6270", size = 18360608, upload-time = "2025-06-07T14:46:25.687Z" }, - { url = "https://files.pythonhosted.org/packages/2f/8a/5756935752ad278c17e8a061eb2127c9a3edf4ba2c31779548b336f23c8d/numpy-2.3.0-cp313-cp313-win32.whl", hash = "sha256:e43c3cce3b6ae5f94696669ff2a6eafd9a6b9332008bafa4117af70f4b88be6f", size = 6310005, upload-time = "2025-06-07T14:50:13.138Z" }, - { url = "https://files.pythonhosted.org/packages/08/60/61d60cf0dfc0bf15381eaef46366ebc0c1a787856d1db0c80b006092af84/numpy-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:81ae0bf2564cf475f94be4a27ef7bcf8af0c3e28da46770fc904da9abd5279b5", size = 12729093, upload-time = "2025-06-07T14:50:31.82Z" }, - { url = "https://files.pythonhosted.org/packages/66/31/2f2f2d2b3e3c32d5753d01437240feaa32220b73258c9eef2e42a0832866/numpy-2.3.0-cp313-cp313-win_arm64.whl", 
hash = "sha256:c8738baa52505fa6e82778580b23f945e3578412554d937093eac9205e845e6e", size = 9885689, upload-time = "2025-06-07T14:50:47.888Z" }, - { url = "https://files.pythonhosted.org/packages/f1/89/c7828f23cc50f607ceb912774bb4cff225ccae7131c431398ad8400e2c98/numpy-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:39b27d8b38942a647f048b675f134dd5a567f95bfff481f9109ec308515c51d8", size = 20986612, upload-time = "2025-06-07T14:46:56.077Z" }, - { url = "https://files.pythonhosted.org/packages/dd/46/79ecf47da34c4c50eedec7511e53d57ffdfd31c742c00be7dc1d5ffdb917/numpy-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0eba4a1ea88f9a6f30f56fdafdeb8da3774349eacddab9581a21234b8535d3d3", size = 14298953, upload-time = "2025-06-07T14:47:18.053Z" }, - { url = "https://files.pythonhosted.org/packages/59/44/f6caf50713d6ff4480640bccb2a534ce1d8e6e0960c8f864947439f0ee95/numpy-2.3.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0f1f11d0a1da54927436505a5a7670b154eac27f5672afc389661013dfe3d4f", size = 5225806, upload-time = "2025-06-07T14:47:27.524Z" }, - { url = "https://files.pythonhosted.org/packages/a6/43/e1fd1aca7c97e234dd05e66de4ab7a5be54548257efcdd1bc33637e72102/numpy-2.3.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:690d0a5b60a47e1f9dcec7b77750a4854c0d690e9058b7bef3106e3ae9117808", size = 6735169, upload-time = "2025-06-07T14:47:38.057Z" }, - { url = "https://files.pythonhosted.org/packages/84/89/f76f93b06a03177c0faa7ca94d0856c4e5c4bcaf3c5f77640c9ed0303e1c/numpy-2.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8b51ead2b258284458e570942137155978583e407babc22e3d0ed7af33ce06f8", size = 14330701, upload-time = "2025-06-07T14:47:59.113Z" }, - { url = "https://files.pythonhosted.org/packages/aa/f5/4858c3e9ff7a7d64561b20580cf7cc5d085794bd465a19604945d6501f6c/numpy-2.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:aaf81c7b82c73bd9b45e79cfb9476cb9c29e937494bfe9092c26aece812818ad", size = 16692983, upload-time = 
"2025-06-07T14:48:24.196Z" }, - { url = "https://files.pythonhosted.org/packages/08/17/0e3b4182e691a10e9483bcc62b4bb8693dbf9ea5dc9ba0b77a60435074bb/numpy-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f420033a20b4f6a2a11f585f93c843ac40686a7c3fa514060a97d9de93e5e72b", size = 15641435, upload-time = "2025-06-07T14:48:47.712Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d5/463279fda028d3c1efa74e7e8d507605ae87f33dbd0543cf4c4527c8b882/numpy-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d344ca32ab482bcf8735d8f95091ad081f97120546f3d250240868430ce52555", size = 18433798, upload-time = "2025-06-07T14:49:14.866Z" }, - { url = "https://files.pythonhosted.org/packages/0e/1e/7a9d98c886d4c39a2b4d3a7c026bffcf8fbcaf518782132d12a301cfc47a/numpy-2.3.0-cp313-cp313t-win32.whl", hash = "sha256:48a2e8eaf76364c32a1feaa60d6925eaf32ed7a040183b807e02674305beef61", size = 6438632, upload-time = "2025-06-07T14:49:25.67Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ab/66fc909931d5eb230107d016861824f335ae2c0533f422e654e5ff556784/numpy-2.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ba17f93a94e503551f154de210e4d50c5e3ee20f7e7a1b5f6ce3f22d419b93bb", size = 12868491, upload-time = "2025-06-07T14:49:44.898Z" }, - { url = "https://files.pythonhosted.org/packages/ee/e8/2c8a1c9e34d6f6d600c83d5ce5b71646c32a13f34ca5c518cc060639841c/numpy-2.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f14e016d9409680959691c109be98c436c6249eaf7f118b424679793607b5944", size = 9935345, upload-time = "2025-06-07T14:50:02.311Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a2/f8c1133f90eaa1c11bbbec1dc28a42054d0ce74bc2c9838c5437ba5d4980/numpy-2.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80b46117c7359de8167cc00a2c7d823bdd505e8c7727ae0871025a86d668283b", size = 21070759, upload-time = "2025-06-07T14:51:18.241Z" }, - { url = 
"https://files.pythonhosted.org/packages/6c/e0/4c05fc44ba28463096eee5ae2a12832c8d2759cc5bcec34ae33386d3ff83/numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:5814a0f43e70c061f47abd5857d120179609ddc32a613138cbb6c4e9e2dbdda5", size = 5301054, upload-time = "2025-06-07T14:51:27.413Z" }, - { url = "https://files.pythonhosted.org/packages/8a/3b/6c06cdebe922bbc2a466fe2105f50f661238ea223972a69c7deb823821e7/numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ef6c1e88fd6b81ac6d215ed71dc8cd027e54d4bf1d2682d362449097156267a2", size = 6817520, upload-time = "2025-06-07T14:51:38.015Z" }, - { url = "https://files.pythonhosted.org/packages/9d/a3/1e536797fd10eb3c5dbd2e376671667c9af19e241843548575267242ea02/numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33a5a12a45bb82d9997e2c0b12adae97507ad7c347546190a18ff14c28bbca12", size = 14398078, upload-time = "2025-06-07T14:52:00.122Z" }, - { url = "https://files.pythonhosted.org/packages/7c/61/9d574b10d9368ecb1a0c923952aa593510a20df4940aa615b3a71337c8db/numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:54dfc8681c1906d239e95ab1508d0a533c4a9505e52ee2d71a5472b04437ef97", size = 16751324, upload-time = "2025-06-07T14:52:25.077Z" }, - { url = "https://files.pythonhosted.org/packages/39/de/bcad52ce972dc26232629ca3a99721fd4b22c1d2bda84d5db6541913ef9c/numpy-2.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e017a8a251ff4d18d71f139e28bdc7c31edba7a507f72b1414ed902cbe48c74d", size = 12924237, upload-time = "2025-06-07T14:52:44.713Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/96/26/1320083986108998bd487e2931eed2aeedf914b6e8905431487543ec911d/numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9", size = 21259016, upload-time = "2025-07-24T20:24:35.214Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2b/792b341463fa93fc7e55abbdbe87dac316c5b8cb5e94fb7a59fb6fa0cda5/numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168", size = 14451158, upload-time = "2025-07-24T20:24:58.397Z" }, + { url = "https://files.pythonhosted.org/packages/b7/13/e792d7209261afb0c9f4759ffef6135b35c77c6349a151f488f531d13595/numpy-2.3.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b", size = 5379817, upload-time = "2025-07-24T20:25:07.746Z" }, + { url = "https://files.pythonhosted.org/packages/49/ce/055274fcba4107c022b2113a213c7287346563f48d62e8d2a5176ad93217/numpy-2.3.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8", size = 6913606, upload-time = "2025-07-24T20:25:18.84Z" }, + { url = "https://files.pythonhosted.org/packages/17/f2/e4d72e6bc5ff01e2ab613dc198d560714971900c03674b41947e38606502/numpy-2.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d", size = 14589652, upload-time = "2025-07-24T20:25:40.356Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b0/fbeee3000a51ebf7222016e2939b5c5ecf8000a19555d04a18f1e02521b8/numpy-2.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3", size = 16938816, upload-time = "2025-07-24T20:26:05.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/ec/2f6c45c3484cc159621ea8fc000ac5a86f1575f090cac78ac27193ce82cd/numpy-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f", size = 16370512, upload-time = "2025-07-24T20:26:30.545Z" }, + { url = "https://files.pythonhosted.org/packages/b5/01/dd67cf511850bd7aefd6347aaae0956ed415abea741ae107834aae7d6d4e/numpy-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097", size = 18884947, upload-time = "2025-07-24T20:26:58.24Z" }, + { url = "https://files.pythonhosted.org/packages/a7/17/2cf60fd3e6a61d006778735edf67a222787a8c1a7842aed43ef96d777446/numpy-2.3.2-cp311-cp311-win32.whl", hash = "sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220", size = 6599494, upload-time = "2025-07-24T20:27:09.786Z" }, + { url = "https://files.pythonhosted.org/packages/d5/03/0eade211c504bda872a594f045f98ddcc6caef2b7c63610946845e304d3f/numpy-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170", size = 13087889, upload-time = "2025-07-24T20:27:29.558Z" }, + { url = "https://files.pythonhosted.org/packages/13/32/2c7979d39dafb2a25087e12310fc7f3b9d3c7d960df4f4bc97955ae0ce1d/numpy-2.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89", size = 10459560, upload-time = "2025-07-24T20:27:46.803Z" }, + { url = "https://files.pythonhosted.org/packages/00/6d/745dd1c1c5c284d17725e5c802ca4d45cfc6803519d777f087b71c9f4069/numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b", size = 20956420, upload-time = "2025-07-24T20:28:18.002Z" }, + { url = "https://files.pythonhosted.org/packages/bc/96/e7b533ea5740641dd62b07a790af5d9d8fec36000b8e2d0472bd7574105f/numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f", size = 14184660, upload-time = "2025-07-24T20:28:39.522Z" }, + { url = "https://files.pythonhosted.org/packages/2b/53/102c6122db45a62aa20d1b18c9986f67e6b97e0d6fbc1ae13e3e4c84430c/numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0", size = 5113382, upload-time = "2025-07-24T20:28:48.544Z" }, + { url = "https://files.pythonhosted.org/packages/2b/21/376257efcbf63e624250717e82b4fae93d60178f09eb03ed766dbb48ec9c/numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b", size = 6647258, upload-time = "2025-07-24T20:28:59.104Z" }, + { url = "https://files.pythonhosted.org/packages/91/ba/f4ebf257f08affa464fe6036e13f2bf9d4642a40228781dc1235da81be9f/numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370", size = 14281409, upload-time = "2025-07-24T20:40:30.298Z" }, + { url = "https://files.pythonhosted.org/packages/59/ef/f96536f1df42c668cbacb727a8c6da7afc9c05ece6d558927fb1722693e1/numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73", size = 16641317, upload-time = "2025-07-24T20:40:56.625Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a7/af813a7b4f9a42f498dde8a4c6fcbff8100eed00182cc91dbaf095645f38/numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc", size = 16056262, upload-time = "2025-07-24T20:41:20.797Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5d/41c4ef8404caaa7f05ed1cfb06afe16a25895260eacbd29b4d84dff2920b/numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be", size = 
18579342, upload-time = "2025-07-24T20:41:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/9950e44c5a11636f4a3af6e825ec23003475cc9a466edb7a759ed3ea63bd/numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036", size = 6320610, upload-time = "2025-07-24T20:42:01.551Z" }, + { url = "https://files.pythonhosted.org/packages/7c/2f/244643a5ce54a94f0a9a2ab578189c061e4a87c002e037b0829dd77293b6/numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f", size = 12786292, upload-time = "2025-07-24T20:42:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/54/cd/7b5f49d5d78db7badab22d8323c1b6ae458fbf86c4fdfa194ab3cd4eb39b/numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07", size = 10194071, upload-time = "2025-07-24T20:42:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074, upload-time = "2025-07-24T20:43:07.813Z" }, + { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311, upload-time = "2025-07-24T20:43:29.335Z" }, + { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022, upload-time = "2025-07-24T20:43:37.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135, upload-time = "2025-07-24T20:43:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147, upload-time = "2025-07-24T20:44:10.328Z" }, + { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989, upload-time = "2025-07-24T20:44:34.88Z" }, + { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052, upload-time = "2025-07-24T20:44:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955, upload-time = "2025-07-24T20:45:26.714Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843, upload-time = "2025-07-24T20:49:24.444Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876, upload-time = "2025-07-24T20:49:43.227Z" }, + { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786, upload-time = "2025-07-24T20:49:59.443Z" }, + { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395, upload-time = "2025-07-24T20:45:58.821Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374, upload-time = "2025-07-24T20:46:20.207Z" }, + { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864, upload-time = "2025-07-24T20:46:30.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533, upload-time = "2025-07-24T20:46:46.111Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007, upload-time = "2025-07-24T20:47:07.1Z" }, + { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914, upload-time = "2025-07-24T20:47:32.459Z" }, + { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708, upload-time = "2025-07-24T20:47:58.129Z" }, + { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678, upload-time = "2025-07-24T20:48:25.402Z" }, + { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832, upload-time = "2025-07-24T20:48:37.181Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049, upload-time = "2025-07-24T20:48:56.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935, upload-time = "2025-07-24T20:49:13.136Z" }, + { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906, upload-time = "2025-07-24T20:50:30.346Z" }, + { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607, upload-time = "2025-07-24T20:50:51.923Z" }, + { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110, upload-time = "2025-07-24T20:51:01.041Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050, upload-time = "2025-07-24T20:51:11.64Z" }, + { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292, upload-time = "2025-07-24T20:51:33.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913, upload-time = "2025-07-24T20:51:58.517Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180, upload-time = "2025-07-24T20:52:22.827Z" }, + { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809, upload-time = "2025-07-24T20:52:51.015Z" }, + { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410, upload-time = "2025-07-24T20:56:44.949Z" }, + { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821, upload-time = "2025-07-24T20:57:06.479Z" }, + { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303, upload-time = "2025-07-24T20:57:22.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524, upload-time = "2025-07-24T20:53:22.086Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519, upload-time = "2025-07-24T20:53:44.053Z" }, + { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972, upload-time = "2025-07-24T20:53:53.81Z" }, + { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439, upload-time = "2025-07-24T20:54:04.742Z" }, + { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479, upload-time = "2025-07-24T20:54:25.819Z" }, + { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805, upload-time = "2025-07-24T20:54:50.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830, upload-time = "2025-07-24T20:55:17.306Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665, upload-time = "2025-07-24T20:55:46.665Z" }, + { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777, upload-time = "2025-07-24T20:55:57.66Z" }, + { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856, upload-time = "2025-07-24T20:56:17.318Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226, upload-time = "2025-07-24T20:56:34.509Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ea/50ebc91d28b275b23b7128ef25c3d08152bc4068f42742867e07a870a42a/numpy-2.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15", size = 21130338, upload-time = "2025-07-24T20:57:54.37Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/57/cdd5eac00dd5f137277355c318a955c0d8fb8aa486020c22afd305f8b88f/numpy-2.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec", size = 14375776, upload-time = "2025-07-24T20:58:16.303Z" }, + { url = "https://files.pythonhosted.org/packages/83/85/27280c7f34fcd305c2209c0cdca4d70775e4859a9eaa92f850087f8dea50/numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712", size = 5304882, upload-time = "2025-07-24T20:58:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/48/b4/6500b24d278e15dd796f43824e69939d00981d37d9779e32499e823aa0aa/numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c", size = 6818405, upload-time = "2025-07-24T20:58:37.341Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c9/142c1e03f199d202da8e980c2496213509291b6024fd2735ad28ae7065c7/numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296", size = 14419651, upload-time = "2025-07-24T20:58:59.048Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8023e87cbea31a750a6c00ff9427d65ebc5fef104a136bfa69f76266d614/numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981", size = 16760166, upload-time = "2025-07-24T21:28:56.38Z" }, + { url = "https://files.pythonhosted.org/packages/78/e3/6690b3f85a05506733c7e90b577e4762517404ea78bab2ca3a5cb1aeb78d/numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619", size = 12977811, upload-time = "2025-07-24T21:29:18.234Z" }, ] [[package]] @@ -1679,59 +1870,59 @@ wheels = [ [[package]] name = 
"pandas" -version = "2.3.0" +version = "2.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "python-dateutil" }, { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/51/48f713c4c728d7c55ef7444ba5ea027c26998d96d1a40953b346438602fc/pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133", size = 4484490, upload-time = "2025-06-05T03:27:54.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/2d/df6b98c736ba51b8eaa71229e8fcd91233a831ec00ab520e1e23090cc072/pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634", size = 11527531, upload-time = "2025-06-05T03:25:48.648Z" }, - { url = "https://files.pythonhosted.org/packages/77/1c/3f8c331d223f86ba1d0ed7d3ed7fcf1501c6f250882489cc820d2567ddbf/pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675", size = 10774764, upload-time = "2025-06-05T03:25:53.228Z" }, - { url = "https://files.pythonhosted.org/packages/1b/45/d2599400fad7fe06b849bd40b52c65684bc88fbe5f0a474d0513d057a377/pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2", size = 11711963, upload-time = "2025-06-05T03:25:56.855Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/f8/5508bc45e994e698dbc93607ee6b9b6eb67df978dc10ee2b09df80103d9e/pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e", size = 12349446, upload-time = "2025-06-05T03:26:01.292Z" }, - { url = "https://files.pythonhosted.org/packages/f7/fc/17851e1b1ea0c8456ba90a2f514c35134dd56d981cf30ccdc501a0adeac4/pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1", size = 12920002, upload-time = "2025-06-06T00:00:07.925Z" }, - { url = "https://files.pythonhosted.org/packages/a1/9b/8743be105989c81fa33f8e2a4e9822ac0ad4aaf812c00fee6bb09fc814f9/pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6", size = 13651218, upload-time = "2025-06-05T03:26:09.731Z" }, - { url = "https://files.pythonhosted.org/packages/26/fa/8eeb2353f6d40974a6a9fd4081ad1700e2386cf4264a8f28542fd10b3e38/pandas-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:40cecc4ea5abd2921682b57532baea5588cc5f80f0231c624056b146887274d2", size = 11082485, upload-time = "2025-06-05T03:26:17.572Z" }, - { url = "https://files.pythonhosted.org/packages/96/1e/ba313812a699fe37bf62e6194265a4621be11833f5fce46d9eae22acb5d7/pandas-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8adff9f138fc614347ff33812046787f7d43b3cef7c0f0171b3340cae333f6ca", size = 11551836, upload-time = "2025-06-05T03:26:22.784Z" }, - { url = "https://files.pythonhosted.org/packages/1b/cc/0af9c07f8d714ea563b12383a7e5bde9479cf32413ee2f346a9c5a801f22/pandas-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5f08eb9a445d07720776df6e641975665c9ea12c9d8a331e0f6890f2dcd76ef", size = 10807977, upload-time = "2025-06-05T16:50:11.109Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/3e/8c0fb7e2cf4a55198466ced1ca6a9054ae3b7e7630df7757031df10001fd/pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d", size = 11788230, upload-time = "2025-06-05T03:26:27.417Z" }, - { url = "https://files.pythonhosted.org/packages/14/22/b493ec614582307faf3f94989be0f7f0a71932ed6f56c9a80c0bb4a3b51e/pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46", size = 12370423, upload-time = "2025-06-05T03:26:34.142Z" }, - { url = "https://files.pythonhosted.org/packages/9f/74/b012addb34cda5ce855218a37b258c4e056a0b9b334d116e518d72638737/pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33", size = 12990594, upload-time = "2025-06-06T00:00:13.934Z" }, - { url = "https://files.pythonhosted.org/packages/95/81/b310e60d033ab64b08e66c635b94076488f0b6ce6a674379dd5b224fc51c/pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c", size = 13745952, upload-time = "2025-06-05T03:26:39.475Z" }, - { url = "https://files.pythonhosted.org/packages/25/ac/f6ee5250a8881b55bd3aecde9b8cfddea2f2b43e3588bca68a4e9aaf46c8/pandas-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa07e138b3f6c04addfeaf56cc7fdb96c3b68a3fe5e5401251f231fce40a0d7a", size = 11094534, upload-time = "2025-06-05T03:26:43.23Z" }, - { url = "https://files.pythonhosted.org/packages/94/46/24192607058dd607dbfacdd060a2370f6afb19c2ccb617406469b9aeb8e7/pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf", size = 11573865, upload-time = "2025-06-05T03:26:46.774Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/cc/ae8ea3b800757a70c9fdccc68b67dc0280a6e814efcf74e4211fd5dea1ca/pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027", size = 10702154, upload-time = "2025-06-05T16:50:14.439Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ba/a7883d7aab3d24c6540a2768f679e7414582cc389876d469b40ec749d78b/pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09", size = 11262180, upload-time = "2025-06-05T16:50:17.453Z" }, - { url = "https://files.pythonhosted.org/packages/01/a5/931fc3ad333d9d87b10107d948d757d67ebcfc33b1988d5faccc39c6845c/pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d", size = 11991493, upload-time = "2025-06-05T03:26:51.813Z" }, - { url = "https://files.pythonhosted.org/packages/d7/bf/0213986830a92d44d55153c1d69b509431a972eb73f204242988c4e66e86/pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20", size = 12470733, upload-time = "2025-06-06T00:00:18.651Z" }, - { url = "https://files.pythonhosted.org/packages/a4/0e/21eb48a3a34a7d4bac982afc2c4eb5ab09f2d988bdf29d92ba9ae8e90a79/pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b", size = 13212406, upload-time = "2025-06-05T03:26:55.992Z" }, - { url = "https://files.pythonhosted.org/packages/1f/d9/74017c4eec7a28892d8d6e31ae9de3baef71f5a5286e74e6b7aad7f8c837/pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be", size = 10976199, upload-time = "2025-06-05T03:26:59.594Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/57/5cb75a56a4842bbd0511c3d1c79186d8315b82dac802118322b2de1194fe/pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983", size = 11518913, upload-time = "2025-06-05T03:27:02.757Z" }, - { url = "https://files.pythonhosted.org/packages/05/01/0c8785610e465e4948a01a059562176e4c8088aa257e2e074db868f86d4e/pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd", size = 10655249, upload-time = "2025-06-05T16:50:20.17Z" }, - { url = "https://files.pythonhosted.org/packages/e8/6a/47fd7517cd8abe72a58706aab2b99e9438360d36dcdb052cf917b7bf3bdc/pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f", size = 11328359, upload-time = "2025-06-05T03:27:06.431Z" }, - { url = "https://files.pythonhosted.org/packages/2a/b3/463bfe819ed60fb7e7ddffb4ae2ee04b887b3444feee6c19437b8f834837/pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3", size = 12024789, upload-time = "2025-06-05T03:27:09.875Z" }, - { url = "https://files.pythonhosted.org/packages/04/0c/e0704ccdb0ac40aeb3434d1c641c43d05f75c92e67525df39575ace35468/pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8", size = 12480734, upload-time = "2025-06-06T00:00:22.246Z" }, - { url = "https://files.pythonhosted.org/packages/e9/df/815d6583967001153bb27f5cf075653d69d51ad887ebbf4cfe1173a1ac58/pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9", size = 13223381, upload-time = "2025-06-05T03:27:15.641Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/88/ca5973ed07b7f484c493e941dbff990861ca55291ff7ac67c815ce347395/pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390", size = 10970135, upload-time = "2025-06-05T03:27:24.131Z" }, - { url = "https://files.pythonhosted.org/packages/24/fb/0994c14d1f7909ce83f0b1fb27958135513c4f3f2528bde216180aa73bfc/pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575", size = 12141356, upload-time = "2025-06-05T03:27:34.547Z" }, - { url = "https://files.pythonhosted.org/packages/9d/a2/9b903e5962134497ac4f8a96f862ee3081cb2506f69f8e4778ce3d9c9d82/pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042", size = 11474674, upload-time = "2025-06-05T03:27:39.448Z" }, - { url = "https://files.pythonhosted.org/packages/81/3a/3806d041bce032f8de44380f866059437fb79e36d6b22c82c187e65f765b/pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c", size = 11439876, upload-time = "2025-06-05T03:27:43.652Z" }, - { url = "https://files.pythonhosted.org/packages/15/aa/3fc3181d12b95da71f5c2537c3e3b3af6ab3a8c392ab41ebb766e0929bc6/pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67", size = 11966182, upload-time = "2025-06-05T03:27:47.652Z" }, - { url = "https://files.pythonhosted.org/packages/37/e7/e12f2d9b0a2c4a2cc86e2aabff7ccfd24f03e597d770abfa2acd313ee46b/pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f", size = 12547686, upload-time = "2025-06-06T00:00:26.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/c2/646d2e93e0af70f4e5359d870a63584dacbc324b54d73e6b3267920ff117/pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249", size = 13231847, upload-time = "2025-06-05T03:27:51.465Z" }, - { url = "https://files.pythonhosted.org/packages/38/86/d786690bd1d666d3369355a173b32a4ab7a83053cbb2d6a24ceeedb31262/pandas-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9efc0acbbffb5236fbdf0409c04edce96bec4bdaa649d49985427bd1ec73e085", size = 11552206, upload-time = "2025-06-06T00:00:29.501Z" }, - { url = "https://files.pythonhosted.org/packages/9c/2f/99f581c1c5b013fcfcbf00a48f5464fb0105da99ea5839af955e045ae3ab/pandas-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75651c14fde635e680496148a8526b328e09fe0572d9ae9b638648c46a544ba3", size = 10796831, upload-time = "2025-06-06T00:00:49.502Z" }, - { url = "https://files.pythonhosted.org/packages/5c/be/3ee7f424367e0f9e2daee93a3145a18b703fbf733ba56e1cf914af4b40d1/pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14", size = 11736943, upload-time = "2025-06-06T00:01:15.992Z" }, - { url = "https://files.pythonhosted.org/packages/83/95/81c7bb8f1aefecd948f80464177a7d9a1c5e205c5a1e279984fdacbac9de/pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324", size = 12366679, upload-time = "2025-06-06T00:01:36.162Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/54cf52fb454408317136d683a736bb597864db74977efee05e63af0a7d38/pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34", size = 12924072, upload-time = "2025-06-06T00:01:44.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/bf/25018e431257f8a42c173080f9da7c592508269def54af4a76ccd1c14420/pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb", size = 13696374, upload-time = "2025-06-06T00:02:14.346Z" }, - { url = "https://files.pythonhosted.org/packages/db/84/5ffd2c447c02db56326f5c19a235a747fae727e4842cc20e1ddd28f990f6/pandas-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b198687ca9c8529662213538a9bb1e60fa0bf0f6af89292eb68fea28743fcd5a", size = 11104735, upload-time = "2025-06-06T00:02:21.088Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493, upload-time = "2025-07-07T19:20:04.079Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ca/aa97b47287221fa37a49634532e520300088e290b20d690b21ce3e448143/pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9", size = 11542731, upload-time = "2025-07-07T19:18:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/80/bf/7938dddc5f01e18e573dcfb0f1b8c9357d9b5fa6ffdee6e605b92efbdff2/pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1", size = 10790031, upload-time = "2025-07-07T19:18:16.611Z" }, + { url = "https://files.pythonhosted.org/packages/ee/2f/9af748366763b2a494fed477f88051dbf06f56053d5c00eba652697e3f94/pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0", size = 11724083, upload-time = "2025-07-07T19:18:20.512Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/95/79ab37aa4c25d1e7df953dde407bb9c3e4ae47d154bc0dd1692f3a6dcf8c/pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191", size = 12342360, upload-time = "2025-07-07T19:18:23.194Z" }, + { url = "https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1", size = 13202098, upload-time = "2025-07-07T19:18:25.558Z" }, + { url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97", size = 13837228, upload-time = "2025-07-07T19:18:28.344Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83", size = 11336561, upload-time = "2025-07-07T19:18:31.211Z" }, + { url = "https://files.pythonhosted.org/packages/76/1c/ccf70029e927e473a4476c00e0d5b32e623bff27f0402d0a92b7fc29bb9f/pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b", size = 11566608, upload-time = "2025-07-07T19:18:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/3c37cb724d76a841f14b8f5fe57e5e3645207cc67370e4f84717e8bb7657/pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f", size = 10823181, upload-time = "2025-07-07T19:18:36.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/4c/367c98854a1251940edf54a4df0826dcacfb987f9068abf3e3064081a382/pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85", size = 11793570, upload-time = "2025-07-07T19:18:38.385Z" }, + { url = "https://files.pythonhosted.org/packages/07/5f/63760ff107bcf5146eee41b38b3985f9055e710a72fdd637b791dea3495c/pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d", size = 12378887, upload-time = "2025-07-07T19:18:41.284Z" }, + { url = "https://files.pythonhosted.org/packages/15/53/f31a9b4dfe73fe4711c3a609bd8e60238022f48eacedc257cd13ae9327a7/pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678", size = 13230957, upload-time = "2025-07-07T19:18:44.187Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/6fce6bf85b5056d065e0a7933cba2616dcb48596f7ba3c6341ec4bcc529d/pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299", size = 13883883, upload-time = "2025-07-07T19:18:46.498Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7b/bdcb1ed8fccb63d04bdb7635161d0ec26596d92c9d7a6cce964e7876b6c1/pandas-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab", size = 11340212, upload-time = "2025-07-07T19:18:49.293Z" }, + { url = "https://files.pythonhosted.org/packages/46/de/b8445e0f5d217a99fe0eeb2f4988070908979bec3587c0633e5428ab596c/pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3", size = 11588172, upload-time = "2025-07-07T19:18:52.054Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/e0/801cdb3564e65a5ac041ab99ea6f1d802a6c325bb6e58c79c06a3f1cd010/pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232", size = 10717365, upload-time = "2025-07-07T19:18:54.785Z" }, + { url = "https://files.pythonhosted.org/packages/51/a5/c76a8311833c24ae61a376dbf360eb1b1c9247a5d9c1e8b356563b31b80c/pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e", size = 11280411, upload-time = "2025-07-07T19:18:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/da/01/e383018feba0a1ead6cf5fe8728e5d767fee02f06a3d800e82c489e5daaf/pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4", size = 11988013, upload-time = "2025-07-07T19:18:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8", size = 12767210, upload-time = "2025-07-07T19:19:02.944Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679", size = 13440571, upload-time = "2025-07-07T19:19:06.82Z" }, + { url = "https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8", size = 10987601, upload-time = "2025-07-07T19:19:09.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393, upload-time = "2025-07-07T19:19:12.245Z" }, + { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750, upload-time = "2025-07-07T19:19:14.612Z" }, + { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004, upload-time = "2025-07-07T19:19:16.857Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869, upload-time = "2025-07-07T19:19:19.265Z" }, + { url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218, upload-time = "2025-07-07T19:19:21.547Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763, upload-time = "2025-07-07T19:19:23.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482, upload-time = "2025-07-07T19:19:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159, upload-time = "2025-07-07T19:19:26.362Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287, upload-time = "2025-07-07T19:19:29.157Z" }, + { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381, upload-time = "2025-07-07T19:19:31.436Z" }, + { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998, upload-time = "2025-07-07T19:19:34.267Z" }, + { url = "https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705, upload-time = "2025-07-07T19:19:36.856Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044, upload-time = "2025-07-07T19:19:39.999Z" }, + { url = "https://files.pythonhosted.org/packages/6e/21/ecf2df680982616459409b09962a8c2065330c7151dc6538069f3b634acf/pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8", size = 11567275, upload-time = "2025-07-07T19:19:45.152Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1a/dcb50e44b75419e96b276c9fb023b0f147b3c411be1cd517492aa2a184d4/pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3", size = 10811488, upload-time = "2025-07-07T19:19:47.797Z" }, + { url = "https://files.pythonhosted.org/packages/2d/55/66cd2b679f6a27398380eac7574bc24746128f74626a3c02b978ea00e5ce/pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da", size = 11763000, upload-time = "2025-07-07T19:19:50.83Z" }, + { url = "https://files.pythonhosted.org/packages/ae/1c/5b9b263c80fd5e231b77df6f78cd7426d1d4ad3a4e858e85b7b3d93d0e9c/pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e", size = 12361395, upload-time = "2025-07-07T19:19:53.714Z" }, + { url = "https://files.pythonhosted.org/packages/f7/74/7e817b31413fbb96366ea327d43d1926a9c48c58074e27e094e2839a0e36/pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7", size = 13225086, upload-time = "2025-07-07T19:19:56.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/0f/bc0a44b47eba2f22ae4235719a573d552ef7ad76ed3ea39ae62d554e040b/pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88", size = 13871698, upload-time = "2025-07-07T19:19:58.854Z" }, + { url = "https://files.pythonhosted.org/packages/fa/cb/6c32f8fadefa4314b740fbe8f74f6a02423bd1549e7c930826df35ac3c1b/pandas-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d", size = 11357186, upload-time = "2025-07-07T19:20:01.475Z" }, ] [[package]] @@ -1752,7 +1943,7 @@ wheels = [ [[package]] name = "pandas-stubs" -version = "2.2.3.250527" +version = "2.3.0.250703" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", @@ -1761,12 +1952,12 @@ resolution-markers = [ ] dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "types-pytz", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312, upload-time = "2025-05-27T15:24:29.716Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/df/c1c51c5cec087b8f4d04669308b700e9648745a77cdd0c8c5e16520703ca/pandas_stubs-2.3.0.250703.tar.gz", hash = "sha256:fb6a8478327b16ed65c46b1541de74f5c5947f3601850caf3e885e0140584717", size = 103910, upload-time = "2025-07-02T17:49:11.667Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/75/cb/09d5f9bf7c8659af134ae0ffc1a349038a5d0ff93e45aedc225bde2872a3/pandas_stubs-2.3.0.250703-py3-none-any.whl", hash = "sha256:a9265fc69909f0f7a9cabc5f596d86c9d531499fed86b7838fd3278285d76b81", size = 154719, upload-time = "2025-07-02T17:49:10.697Z" }, ] [[package]] @@ -1801,90 +1992,115 @@ wheels = [ [[package]] name = "pillow" -version = "11.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", size = 47026707, upload-time = "2025-04-12T17:50:03.289Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/8b/b158ad57ed44d3cc54db8d68ad7c0a58b8fc0e4c7a3f995f9d62d5b464a1/pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047", size = 3198442, upload-time = "2025-04-12T17:47:10.666Z" }, - { url = "https://files.pythonhosted.org/packages/b1/f8/bb5d956142f86c2d6cc36704943fa761f2d2e4c48b7436fd0a85c20f1713/pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95", size = 3030553, upload-time = "2025-04-12T17:47:13.153Z" }, - { url = "https://files.pythonhosted.org/packages/22/7f/0e413bb3e2aa797b9ca2c5c38cb2e2e45d88654e5b12da91ad446964cfae/pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61", size = 4405503, upload-time = 
"2025-04-12T17:47:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b4/cc647f4d13f3eb837d3065824aa58b9bcf10821f029dc79955ee43f793bd/pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1", size = 4490648, upload-time = "2025-04-12T17:47:17.37Z" }, - { url = "https://files.pythonhosted.org/packages/c2/6f/240b772a3b35cdd7384166461567aa6713799b4e78d180c555bd284844ea/pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c", size = 4508937, upload-time = "2025-04-12T17:47:19.066Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/7ca9c815ade5fdca18853db86d812f2f188212792780208bdb37a0a6aef4/pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d", size = 4599802, upload-time = "2025-04-12T17:47:21.404Z" }, - { url = "https://files.pythonhosted.org/packages/02/81/c3d9d38ce0c4878a77245d4cf2c46d45a4ad0f93000227910a46caff52f3/pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97", size = 4576717, upload-time = "2025-04-12T17:47:23.571Z" }, - { url = "https://files.pythonhosted.org/packages/42/49/52b719b89ac7da3185b8d29c94d0e6aec8140059e3d8adcaa46da3751180/pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579", size = 4654874, upload-time = "2025-04-12T17:47:25.783Z" }, - { url = "https://files.pythonhosted.org/packages/5b/0b/ede75063ba6023798267023dc0d0401f13695d228194d2242d5a7ba2f964/pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d", size = 2331717, upload-time = "2025-04-12T17:47:28.922Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/3c/9831da3edea527c2ed9a09f31a2c04e77cd705847f13b69ca60269eec370/pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad", size = 2676204, upload-time = "2025-04-12T17:47:31.283Z" }, - { url = "https://files.pythonhosted.org/packages/01/97/1f66ff8a1503d8cbfc5bae4dc99d54c6ec1e22ad2b946241365320caabc2/pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2", size = 2414767, upload-time = "2025-04-12T17:47:34.655Z" }, - { url = "https://files.pythonhosted.org/packages/68/08/3fbf4b98924c73037a8e8b4c2c774784805e0fb4ebca6c5bb60795c40125/pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70", size = 3198450, upload-time = "2025-04-12T17:47:37.135Z" }, - { url = "https://files.pythonhosted.org/packages/84/92/6505b1af3d2849d5e714fc75ba9e69b7255c05ee42383a35a4d58f576b16/pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf", size = 3030550, upload-time = "2025-04-12T17:47:39.345Z" }, - { url = "https://files.pythonhosted.org/packages/3c/8c/ac2f99d2a70ff966bc7eb13dacacfaab57c0549b2ffb351b6537c7840b12/pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7", size = 4415018, upload-time = "2025-04-12T17:47:41.128Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e3/0a58b5d838687f40891fff9cbaf8669f90c96b64dc8f91f87894413856c6/pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8", size = 4498006, upload-time = "2025-04-12T17:47:42.912Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/f5/6ba14718135f08fbfa33308efe027dd02b781d3f1d5c471444a395933aac/pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600", size = 4517773, upload-time = "2025-04-12T17:47:44.611Z" }, - { url = "https://files.pythonhosted.org/packages/20/f2/805ad600fc59ebe4f1ba6129cd3a75fb0da126975c8579b8f57abeb61e80/pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788", size = 4607069, upload-time = "2025-04-12T17:47:46.46Z" }, - { url = "https://files.pythonhosted.org/packages/71/6b/4ef8a288b4bb2e0180cba13ca0a519fa27aa982875882392b65131401099/pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e", size = 4583460, upload-time = "2025-04-12T17:47:49.255Z" }, - { url = "https://files.pythonhosted.org/packages/62/ae/f29c705a09cbc9e2a456590816e5c234382ae5d32584f451c3eb41a62062/pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e", size = 4661304, upload-time = "2025-04-12T17:47:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1a/c8217b6f2f73794a5e219fbad087701f412337ae6dbb956db37d69a9bc43/pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6", size = 2331809, upload-time = "2025-04-12T17:47:54.425Z" }, - { url = "https://files.pythonhosted.org/packages/e2/72/25a8f40170dc262e86e90f37cb72cb3de5e307f75bf4b02535a61afcd519/pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193", size = 2676338, upload-time = "2025-04-12T17:47:56.535Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/9e/76825e39efee61efea258b479391ca77d64dbd9e5804e4ad0fa453b4ba55/pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7", size = 2414918, upload-time = "2025-04-12T17:47:58.217Z" }, - { url = "https://files.pythonhosted.org/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", size = 3190185, upload-time = "2025-04-12T17:48:00.417Z" }, - { url = "https://files.pythonhosted.org/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", size = 3030306, upload-time = "2025-04-12T17:48:02.391Z" }, - { url = "https://files.pythonhosted.org/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", size = 4416121, upload-time = "2025-04-12T17:48:04.554Z" }, - { url = "https://files.pythonhosted.org/packages/62/73/972b7742e38ae0e2ac76ab137ca6005dcf877480da0d9d61d93b613065b4/pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4", size = 4501707, upload-time = "2025-04-12T17:48:06.831Z" }, - { url = "https://files.pythonhosted.org/packages/e4/3a/427e4cb0b9e177efbc1a84798ed20498c4f233abde003c06d2650a6d60cb/pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d", size = 4522921, upload-time = "2025-04-12T17:48:09.229Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/7c/d8b1330458e4d2f3f45d9508796d7caf0c0d3764c00c823d10f6f1a3b76d/pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4", size = 4612523, upload-time = "2025-04-12T17:48:11.631Z" }, - { url = "https://files.pythonhosted.org/packages/b3/2f/65738384e0b1acf451de5a573d8153fe84103772d139e1e0bdf1596be2ea/pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443", size = 4587836, upload-time = "2025-04-12T17:48:13.592Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c5/e795c9f2ddf3debb2dedd0df889f2fe4b053308bb59a3cc02a0cd144d641/pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c", size = 4669390, upload-time = "2025-04-12T17:48:15.938Z" }, - { url = "https://files.pythonhosted.org/packages/96/ae/ca0099a3995976a9fce2f423166f7bff9b12244afdc7520f6ed38911539a/pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3", size = 2332309, upload-time = "2025-04-12T17:48:17.885Z" }, - { url = "https://files.pythonhosted.org/packages/7c/18/24bff2ad716257fc03da964c5e8f05d9790a779a8895d6566e493ccf0189/pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941", size = 2676768, upload-time = "2025-04-12T17:48:19.655Z" }, - { url = "https://files.pythonhosted.org/packages/da/bb/e8d656c9543276517ee40184aaa39dcb41e683bca121022f9323ae11b39d/pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb", size = 2415087, upload-time = "2025-04-12T17:48:21.991Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/9c/447528ee3776e7ab8897fe33697a7ff3f0475bb490c5ac1456a03dc57956/pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28", size = 3190098, upload-time = "2025-04-12T17:48:23.915Z" }, - { url = "https://files.pythonhosted.org/packages/b5/09/29d5cd052f7566a63e5b506fac9c60526e9ecc553825551333e1e18a4858/pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830", size = 3030166, upload-time = "2025-04-12T17:48:25.738Z" }, - { url = "https://files.pythonhosted.org/packages/71/5d/446ee132ad35e7600652133f9c2840b4799bbd8e4adba881284860da0a36/pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0", size = 4408674, upload-time = "2025-04-12T17:48:27.908Z" }, - { url = "https://files.pythonhosted.org/packages/69/5f/cbe509c0ddf91cc3a03bbacf40e5c2339c4912d16458fcb797bb47bcb269/pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1", size = 4496005, upload-time = "2025-04-12T17:48:29.888Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b3/dd4338d8fb8a5f312021f2977fb8198a1184893f9b00b02b75d565c33b51/pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f", size = 4518707, upload-time = "2025-04-12T17:48:31.874Z" }, - { url = "https://files.pythonhosted.org/packages/13/eb/2552ecebc0b887f539111c2cd241f538b8ff5891b8903dfe672e997529be/pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155", size = 4610008, upload-time = "2025-04-12T17:48:34.422Z" }, - { url = 
"https://files.pythonhosted.org/packages/72/d1/924ce51bea494cb6e7959522d69d7b1c7e74f6821d84c63c3dc430cbbf3b/pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14", size = 4585420, upload-time = "2025-04-12T17:48:37.641Z" }, - { url = "https://files.pythonhosted.org/packages/43/ab/8f81312d255d713b99ca37479a4cb4b0f48195e530cdc1611990eb8fd04b/pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b", size = 4667655, upload-time = "2025-04-12T17:48:39.652Z" }, - { url = "https://files.pythonhosted.org/packages/94/86/8f2e9d2dc3d308dfd137a07fe1cc478df0a23d42a6c4093b087e738e4827/pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2", size = 2332329, upload-time = "2025-04-12T17:48:41.765Z" }, - { url = "https://files.pythonhosted.org/packages/6d/ec/1179083b8d6067a613e4d595359b5fdea65d0a3b7ad623fee906e1b3c4d2/pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691", size = 2676388, upload-time = "2025-04-12T17:48:43.625Z" }, - { url = "https://files.pythonhosted.org/packages/23/f1/2fc1e1e294de897df39fa8622d829b8828ddad938b0eaea256d65b84dd72/pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c", size = 2414950, upload-time = "2025-04-12T17:48:45.475Z" }, - { url = "https://files.pythonhosted.org/packages/c4/3e/c328c48b3f0ead7bab765a84b4977acb29f101d10e4ef57a5e3400447c03/pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22", size = 3192759, upload-time = "2025-04-12T17:48:47.866Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/0e/1c68532d833fc8b9f404d3a642991441d9058eccd5606eab31617f29b6d4/pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7", size = 3033284, upload-time = "2025-04-12T17:48:50.189Z" }, - { url = "https://files.pythonhosted.org/packages/b7/cb/6faf3fb1e7705fd2db74e070f3bf6f88693601b0ed8e81049a8266de4754/pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16", size = 4445826, upload-time = "2025-04-12T17:48:52.346Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/8be03d50b70ca47fb434a358919d6a8d6580f282bbb7af7e4aa40103461d/pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b", size = 4527329, upload-time = "2025-04-12T17:48:54.403Z" }, - { url = "https://files.pythonhosted.org/packages/fd/a4/bfe78777076dc405e3bd2080bc32da5ab3945b5a25dc5d8acaa9de64a162/pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406", size = 4549049, upload-time = "2025-04-12T17:48:56.383Z" }, - { url = "https://files.pythonhosted.org/packages/65/4d/eaf9068dc687c24979e977ce5677e253624bd8b616b286f543f0c1b91662/pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91", size = 4635408, upload-time = "2025-04-12T17:48:58.782Z" }, - { url = "https://files.pythonhosted.org/packages/1d/26/0fd443365d9c63bc79feb219f97d935cd4b93af28353cba78d8e77b61719/pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751", size = 4614863, upload-time = "2025-04-12T17:49:00.709Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/65/dca4d2506be482c2c6641cacdba5c602bc76d8ceb618fd37de855653a419/pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9", size = 4692938, upload-time = "2025-04-12T17:49:02.946Z" }, - { url = "https://files.pythonhosted.org/packages/b3/92/1ca0c3f09233bd7decf8f7105a1c4e3162fb9142128c74adad0fb361b7eb/pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd", size = 2335774, upload-time = "2025-04-12T17:49:04.889Z" }, - { url = "https://files.pythonhosted.org/packages/a5/ac/77525347cb43b83ae905ffe257bbe2cc6fd23acb9796639a1f56aa59d191/pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e", size = 2681895, upload-time = "2025-04-12T17:49:06.635Z" }, - { url = "https://files.pythonhosted.org/packages/67/32/32dc030cfa91ca0fc52baebbba2e009bb001122a1daa8b6a79ad830b38d3/pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", size = 2417234, upload-time = "2025-04-12T17:49:08.399Z" }, - { url = "https://files.pythonhosted.org/packages/21/3a/c1835d1c7cf83559e95b4f4ed07ab0bb7acc689712adfce406b3f456e9fd/pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8", size = 3198391, upload-time = "2025-04-12T17:49:10.122Z" }, - { url = "https://files.pythonhosted.org/packages/b6/4d/dcb7a9af3fc1e8653267c38ed622605d9d1793349274b3ef7af06457e257/pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909", size = 3030573, upload-time = "2025-04-12T17:49:11.938Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/29/530ca098c1a1eb31d4e163d317d0e24e6d2ead907991c69ca5b663de1bc5/pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928", size = 4398677, upload-time = "2025-04-12T17:49:13.861Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ee/0e5e51db34de1690264e5f30dcd25328c540aa11d50a3bc0b540e2a445b6/pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79", size = 4484986, upload-time = "2025-04-12T17:49:15.948Z" }, - { url = "https://files.pythonhosted.org/packages/93/7d/bc723b41ce3d2c28532c47678ec988974f731b5c6fadd5b3a4fba9015e4f/pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35", size = 4501897, upload-time = "2025-04-12T17:49:17.839Z" }, - { url = "https://files.pythonhosted.org/packages/be/0b/532e31abc7389617ddff12551af625a9b03cd61d2989fa595e43c470ec67/pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb", size = 4592618, upload-time = "2025-04-12T17:49:19.7Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f0/21ed6499a6216fef753e2e2254a19d08bff3747108ba042422383f3e9faa/pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a", size = 4570493, upload-time = "2025-04-12T17:49:21.703Z" }, - { url = "https://files.pythonhosted.org/packages/68/de/17004ddb8ab855573fe1127ab0168d11378cdfe4a7ee2a792a70ff2e9ba7/pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36", size = 4647748, upload-time = "2025-04-12T17:49:23.579Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/23/82ecb486384bb3578115c509d4a00bb52f463ee700a5ca1be53da3c88c19/pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67", size = 2331731, upload-time = "2025-04-12T17:49:25.58Z" }, - { url = "https://files.pythonhosted.org/packages/58/bb/87efd58b3689537a623d44dbb2550ef0bb5ff6a62769707a0fe8b1a7bdeb/pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1", size = 2676346, upload-time = "2025-04-12T17:49:27.342Z" }, - { url = "https://files.pythonhosted.org/packages/80/08/dc268475b22887b816e5dcfae31bce897f524b4646bab130c2142c9b2400/pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e", size = 2414623, upload-time = "2025-04-12T17:49:29.139Z" }, - { url = "https://files.pythonhosted.org/packages/33/49/c8c21e4255b4f4a2c0c68ac18125d7f5460b109acc6dfdef1a24f9b960ef/pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156", size = 3181727, upload-time = "2025-04-12T17:49:31.898Z" }, - { url = "https://files.pythonhosted.org/packages/6d/f1/f7255c0838f8c1ef6d55b625cfb286835c17e8136ce4351c5577d02c443b/pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772", size = 2999833, upload-time = "2025-04-12T17:49:34.2Z" }, - { url = "https://files.pythonhosted.org/packages/e2/57/9968114457bd131063da98d87790d080366218f64fa2943b65ac6739abb3/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363", size = 3437472, upload-time = "2025-04-12T17:49:36.294Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/1b/e35d8a158e21372ecc48aac9c453518cfe23907bb82f950d6e1c72811eb0/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0", size = 3459976, upload-time = "2025-04-12T17:49:38.988Z" }, - { url = "https://files.pythonhosted.org/packages/26/da/2c11d03b765efff0ccc473f1c4186dc2770110464f2177efaed9cf6fae01/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01", size = 3527133, upload-time = "2025-04-12T17:49:40.985Z" }, - { url = "https://files.pythonhosted.org/packages/79/1a/4e85bd7cadf78412c2a3069249a09c32ef3323650fd3005c97cca7aa21df/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193", size = 3571555, upload-time = "2025-04-12T17:49:42.964Z" }, - { url = "https://files.pythonhosted.org/packages/69/03/239939915216de1e95e0ce2334bf17a7870ae185eb390fab6d706aadbfc0/pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013", size = 2674713, upload-time = "2025-04-12T17:49:44.944Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ad/2613c04633c7257d9481ab21d6b5364b59fc5d75faafd7cb8693523945a3/pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed", size = 3181734, upload-time = "2025-04-12T17:49:46.789Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fd/dcdda4471ed667de57bb5405bb42d751e6cfdd4011a12c248b455c778e03/pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c", size = 2999841, upload-time = "2025-04-12T17:49:48.812Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/89/8a2536e95e77432833f0db6fd72a8d310c8e4272a04461fb833eb021bf94/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd", size = 3437470, upload-time = "2025-04-12T17:49:50.831Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8f/abd47b73c60712f88e9eda32baced7bfc3e9bd6a7619bb64b93acff28c3e/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076", size = 3460013, upload-time = "2025-04-12T17:49:53.278Z" }, - { url = "https://files.pythonhosted.org/packages/f6/20/5c0a0aa83b213b7a07ec01e71a3d6ea2cf4ad1d2c686cc0168173b6089e7/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b", size = 3527165, upload-time = "2025-04-12T17:49:55.164Z" }, - { url = "https://files.pythonhosted.org/packages/58/0e/2abab98a72202d91146abc839e10c14f7cf36166f12838ea0c4db3ca6ecb/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f", size = 3571586, upload-time = "2025-04-12T17:49:57.171Z" }, - { url = "https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751, upload-time = "2025-04-12T17:49:59.628Z" }, +version = "11.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860", size = 5316554, upload-time = "2025-07-01T09:13:39.342Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad", size = 4686548, upload-time = "2025-07-01T09:13:41.835Z" }, + { url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0", size = 5859742, upload-time = "2025-07-03T13:09:47.439Z" }, + { url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b", size = 7633087, upload-time = "2025-07-03T13:09:51.796Z" }, + { url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50", size = 5963350, upload-time = "2025-07-01T09:13:43.865Z" }, + { url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae", size = 6631840, upload-time = "2025-07-01T09:13:46.161Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9", size = 6074005, upload-time = "2025-07-01T09:13:47.829Z" }, + { url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e", size = 6708372, upload-time = "2025-07-01T09:13:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6", size = 6277090, upload-time = "2025-07-01T09:13:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f", size = 6985988, upload-time = "2025-07-01T09:13:55.699Z" }, + { url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f", size = 2422899, upload-time = "2025-07-01T09:13:57.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, + { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, + { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { 
url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, + { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, + { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, + { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, + { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, + { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, + { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, + { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, + { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, + { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, + { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, + { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, + { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8e/9c089f01677d1264ab8648352dcb7773f37da6ad002542760c80107da816/pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f", size = 5316478, upload-time = "2025-07-01T09:15:52.209Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a9/5749930caf674695867eb56a581e78eb5f524b7583ff10b01b6e5048acb3/pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081", size = 4686522, upload-time = "2025-07-01T09:15:54.162Z" }, + { url = "https://files.pythonhosted.org/packages/43/46/0b85b763eb292b691030795f9f6bb6fcaf8948c39413c81696a01c3577f7/pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4", size = 5853376, upload-time = "2025-07-03T13:11:01.066Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/1a230ec0067243cbd60bc2dad5dc3ab46a8a41e21c15f5c9b52b26873069/pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc", size = 7626020, upload-time = "2025-07-03T13:11:06.479Z" }, + { url = "https://files.pythonhosted.org/packages/63/dd/f296c27ffba447bfad76c6a0c44c1ea97a90cb9472b9304c94a732e8dbfb/pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06", size = 5956732, upload-time = "2025-07-01T09:15:56.111Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/a0/98a3630f0b57f77bae67716562513d3032ae70414fcaf02750279c389a9e/pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a", size = 6624404, upload-time = "2025-07-01T09:15:58.245Z" }, + { url = "https://files.pythonhosted.org/packages/de/e6/83dfba5646a290edd9a21964da07674409e410579c341fc5b8f7abd81620/pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978", size = 6067760, upload-time = "2025-07-01T09:16:00.003Z" }, + { url = "https://files.pythonhosted.org/packages/bc/41/15ab268fe6ee9a2bc7391e2bbb20a98d3974304ab1a406a992dcb297a370/pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d", size = 6700534, upload-time = "2025-07-01T09:16:02.29Z" }, + { url = "https://files.pythonhosted.org/packages/64/79/6d4f638b288300bed727ff29f2a3cb63db054b33518a95f27724915e3fbc/pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71", size = 6277091, upload-time = "2025-07-01T09:16:04.4Z" }, + { url = "https://files.pythonhosted.org/packages/46/05/4106422f45a05716fd34ed21763f8ec182e8ea00af6e9cb05b93a247361a/pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada", size = 6986091, upload-time = "2025-07-01T09:16:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/63/c6/287fd55c2c12761d0591549d48885187579b7c257bef0c6660755b0b59ae/pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb", size = 2422632, upload-time = "2025-07-01T09:16:08.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967", size = 5270556, upload-time = "2025-07-01T09:16:09.961Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe", size = 4654625, upload-time = "2025-07-01T09:16:11.913Z" }, + { url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c", size = 4874207, upload-time = "2025-07-03T13:11:10.201Z" }, + { url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25", size = 6583939, upload-time = "2025-07-03T13:11:15.68Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27", size = 4957166, upload-time = "2025-07-01T09:16:13.74Z" }, + { url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a", size = 5581482, upload-time = "2025-07-01T09:16:16.107Z" }, + 
{ url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f", size = 6984596, upload-time = "2025-07-01T09:16:18.07Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, + { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, ] [[package]] @@ -1907,7 +2123,7 @@ wheels = [ [[package]] name = "pre-commit" -version = "4.2.0" +version = "4.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, @@ -1916,9 +2132,9 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = 
"sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, ] [[package]] @@ -1968,64 +2184,52 @@ wheels = [ [[package]] name = "pyarrow" -version = "20.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/ee/a7810cb9f3d6e9238e61d312076a9859bf3668fd21c69744de9532383912/pyarrow-20.0.0.tar.gz", hash = "sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1", size = 1125187, upload-time = "2025-04-27T12:34:23.264Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/23/77094eb8ee0dbe88441689cb6afc40ac312a1e15d3a7acc0586999518222/pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7", size = 30832591, upload-time = "2025-04-27T12:27:27.89Z" }, - { url = "https://files.pythonhosted.org/packages/c3/d5/48cc573aff00d62913701d9fac478518f693b30c25f2c157550b0b2565cb/pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4", size = 32273686, upload-time = "2025-04-27T12:27:36.816Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae", size = 41337051, upload-time = "2025-04-27T12:27:44.4Z" }, - { url = "https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee", size = 42404659, upload-time = "2025-04-27T12:27:51.715Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20", size = 40695446, upload-time = "2025-04-27T12:27:59.643Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9", size = 42278528, upload-time = "2025-04-27T12:28:07.297Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75", size = 42918162, upload-time = "2025-04-27T12:28:15.716Z" }, - { url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8", size = 44550319, upload-time = "2025-04-27T12:28:27.026Z" }, - { url = "https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191", size = 25770759, upload-time = "2025-04-27T12:28:33.702Z" }, - { url = "https://files.pythonhosted.org/packages/47/a2/b7930824181ceadd0c63c1042d01fa4ef63eee233934826a7a2a9af6e463/pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0", size = 30856035, upload-time = "2025-04-27T12:28:40.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/18/c765770227d7f5bdfa8a69f64b49194352325c66a5c3bb5e332dfd5867d9/pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb", size = 32309552, upload-time = "2025-04-27T12:28:47.051Z" }, - { url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232", size = 41334704, upload-time = "2025-04-27T12:28:55.064Z" }, - { url = "https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f", size = 42399836, upload-time = "2025-04-27T12:29:02.13Z" }, - { url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab", size = 40711789, upload-time = "2025-04-27T12:29:09.951Z" }, - { url = "https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62", size = 42301124, upload-time = "2025-04-27T12:29:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c", size = 42916060, upload-time = "2025-04-27T12:29:24.253Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3", size = 44547640, upload-time = "2025-04-27T12:29:32.782Z" }, - { url = "https://files.pythonhosted.org/packages/54/96/46613131b4727f10fd2ffa6d0d6f02efcc09a0e7374eff3b5771548aa95b/pyarrow-20.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc", size = 25781491, upload-time = "2025-04-27T12:29:38.464Z" }, - { url = "https://files.pythonhosted.org/packages/a1/d6/0c10e0d54f6c13eb464ee9b67a68b8c71bcf2f67760ef5b6fbcddd2ab05f/pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:75a51a5b0eef32727a247707d4755322cb970be7e935172b6a3a9f9ae98404ba", size = 30815067, upload-time = "2025-04-27T12:29:44.384Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e2/04e9874abe4094a06fd8b0cbb0f1312d8dd7d707f144c2ec1e5e8f452ffa/pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:211d5e84cecc640c7a3ab900f930aaff5cd2702177e0d562d426fb7c4f737781", size = 32297128, upload-time = "2025-04-27T12:29:52.038Z" }, - { url = "https://files.pythonhosted.org/packages/31/fd/c565e5dcc906a3b471a83273039cb75cb79aad4a2d4a12f76cc5ae90a4b8/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199", size = 41334890, upload-time = "2025-04-27T12:29:59.452Z" }, - { url = "https://files.pythonhosted.org/packages/af/a9/3bdd799e2c9b20c1ea6dc6fa8e83f29480a97711cf806e823f808c2316ac/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd", size = 42421775, upload-time = "2025-04-27T12:30:06.875Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/f7/da98ccd86354c332f593218101ae56568d5dcedb460e342000bd89c49cc1/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28", size = 40687231, upload-time = "2025-04-27T12:30:13.954Z" }, - { url = "https://files.pythonhosted.org/packages/bb/1b/2168d6050e52ff1e6cefc61d600723870bf569cbf41d13db939c8cf97a16/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8", size = 42295639, upload-time = "2025-04-27T12:30:21.949Z" }, - { url = "https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e", size = 42908549, upload-time = "2025-04-27T12:30:29.551Z" }, - { url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a", size = 44557216, upload-time = "2025-04-27T12:30:36.977Z" }, - { url = "https://files.pythonhosted.org/packages/a0/8e/9adee63dfa3911be2382fb4d92e4b2e7d82610f9d9f668493bebaa2af50f/pyarrow-20.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b", size = 25660496, upload-time = "2025-04-27T12:30:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/9b/aa/daa413b81446d20d4dad2944110dcf4cf4f4179ef7f685dd5a6d7570dc8e/pyarrow-20.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a15532e77b94c61efadde86d10957950392999503b3616b2ffcef7621a002893", size = 30798501, upload-time = "2025-04-27T12:30:48.351Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/75/2303d1caa410925de902d32ac215dc80a7ce7dd8dfe95358c165f2adf107/pyarrow-20.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:dd43f58037443af715f34f1322c782ec463a3c8a94a85fdb2d987ceb5658e061", size = 32277895, upload-time = "2025-04-27T12:30:55.238Z" }, - { url = "https://files.pythonhosted.org/packages/92/41/fe18c7c0b38b20811b73d1bdd54b1fccba0dab0e51d2048878042d84afa8/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae", size = 41327322, upload-time = "2025-04-27T12:31:05.587Z" }, - { url = "https://files.pythonhosted.org/packages/da/ab/7dbf3d11db67c72dbf36ae63dcbc9f30b866c153b3a22ef728523943eee6/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4", size = 42411441, upload-time = "2025-04-27T12:31:15.675Z" }, - { url = "https://files.pythonhosted.org/packages/90/c3/0c7da7b6dac863af75b64e2f827e4742161128c350bfe7955b426484e226/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5", size = 40677027, upload-time = "2025-04-27T12:31:24.631Z" }, - { url = "https://files.pythonhosted.org/packages/be/27/43a47fa0ff9053ab5203bb3faeec435d43c0d8bfa40179bfd076cdbd4e1c/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b", size = 42281473, upload-time = "2025-04-27T12:31:31.311Z" }, - { url = "https://files.pythonhosted.org/packages/bc/0b/d56c63b078876da81bbb9ba695a596eabee9b085555ed12bf6eb3b7cab0e/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3", size = 42893897, upload-time = "2025-04-27T12:31:39.406Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/ac/7d4bd020ba9145f354012838692d48300c1b8fe5634bfda886abcada67ed/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368", size = 44543847, upload-time = "2025-04-27T12:31:45.997Z" }, - { url = "https://files.pythonhosted.org/packages/9d/07/290f4abf9ca702c5df7b47739c1b2c83588641ddfa2cc75e34a301d42e55/pyarrow-20.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:30b3051b7975801c1e1d387e17c588d8ab05ced9b1e14eec57915f79869b5031", size = 25653219, upload-time = "2025-04-27T12:31:54.11Z" }, - { url = "https://files.pythonhosted.org/packages/95/df/720bb17704b10bd69dde086e1400b8eefb8f58df3f8ac9cff6c425bf57f1/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:ca151afa4f9b7bc45bcc791eb9a89e90a9eb2772767d0b1e5389609c7d03db63", size = 30853957, upload-time = "2025-04-27T12:31:59.215Z" }, - { url = "https://files.pythonhosted.org/packages/d9/72/0d5f875efc31baef742ba55a00a25213a19ea64d7176e0fe001c5d8b6e9a/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:4680f01ecd86e0dd63e39eb5cd59ef9ff24a9d166db328679e36c108dc993d4c", size = 32247972, upload-time = "2025-04-27T12:32:05.369Z" }, - { url = "https://files.pythonhosted.org/packages/d5/bc/e48b4fa544d2eea72f7844180eb77f83f2030b84c8dad860f199f94307ed/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70", size = 41256434, upload-time = "2025-04-27T12:32:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/c3/01/974043a29874aa2cf4f87fb07fd108828fc7362300265a2a64a94965e35b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b", size = 42353648, upload-time = "2025-04-27T12:32:20.766Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/95/cc0d3634cde9ca69b0e51cbe830d8915ea32dda2157560dda27ff3b3337b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122", size = 40619853, upload-time = "2025-04-27T12:32:28.1Z" }, - { url = "https://files.pythonhosted.org/packages/29/c2/3ad40e07e96a3e74e7ed7cc8285aadfa84eb848a798c98ec0ad009eb6bcc/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6", size = 42241743, upload-time = "2025-04-27T12:32:35.792Z" }, - { url = "https://files.pythonhosted.org/packages/eb/cb/65fa110b483339add6a9bc7b6373614166b14e20375d4daa73483755f830/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c", size = 42839441, upload-time = "2025-04-27T12:32:46.64Z" }, - { url = "https://files.pythonhosted.org/packages/98/7b/f30b1954589243207d7a0fbc9997401044bf9a033eec78f6cb50da3f304a/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a", size = 44503279, upload-time = "2025-04-27T12:32:56.503Z" }, - { url = "https://files.pythonhosted.org/packages/37/40/ad395740cd641869a13bcf60851296c89624662575621968dcfafabaa7f6/pyarrow-20.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:82f1ee5133bd8f49d31be1299dc07f585136679666b502540db854968576faf9", size = 25944982, upload-time = "2025-04-27T12:33:04.72Z" }, - { url = "https://files.pythonhosted.org/packages/10/53/421820fa125138c868729b930d4bc487af2c4b01b1c6104818aab7e98f13/pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:1bcbe471ef3349be7714261dea28fe280db574f9d0f77eeccc195a2d161fd861", size = 30844702, upload-time = "2025-04-27T12:33:12.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/70/fd75e03312b715e90d928fb91ed8d45c9b0520346e5231b1c69293afd4c7/pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a18a14baef7d7ae49247e75641fd8bcbb39f44ed49a9fc4ec2f65d5031aa3b96", size = 32287180, upload-time = "2025-04-27T12:33:20.597Z" }, - { url = "https://files.pythonhosted.org/packages/c4/e3/21e5758e46219fdedf5e6c800574dd9d17e962e80014cfe08d6d475be863/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc", size = 41351968, upload-time = "2025-04-27T12:33:28.215Z" }, - { url = "https://files.pythonhosted.org/packages/ac/f5/ed6a4c4b11f9215092a35097a985485bb7d879cb79d93d203494e8604f4e/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec", size = 42415208, upload-time = "2025-04-27T12:33:37.04Z" }, - { url = "https://files.pythonhosted.org/packages/44/e5/466a63668ba25788ee8d38d55f853a60469ae7ad1cda343db9f3f45e0b0a/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5", size = 40708556, upload-time = "2025-04-27T12:33:46.483Z" }, - { url = "https://files.pythonhosted.org/packages/e8/d7/4c4d4e4cf6e53e16a519366dfe9223ee4a7a38e6e28c1c0d372b38ba3fe7/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b", size = 42291754, upload-time = "2025-04-27T12:33:55.4Z" }, - { url = "https://files.pythonhosted.org/packages/07/d5/79effb32585b7c18897d3047a2163034f3f9c944d12f7b2fd8df6a2edc70/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d", size = 42936483, upload-time = "2025-04-27T12:34:03.694Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/5c/f707603552c058b2e9129732de99a67befb1f13f008cc58856304a62c38b/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619", size = 44558895, upload-time = "2025-04-27T12:34:13.26Z" }, - { url = "https://files.pythonhosted.org/packages/26/cc/1eb6a01c1bbc787f596c270c46bcd2273e35154a84afcb1d0cb4cc72457e/pyarrow-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:9965a050048ab02409fb7cbbefeedba04d3d67f2cc899eff505cc084345959ca", size = 25785667, upload-time = "2025-04-27T12:34:19.739Z" }, +version = "21.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487, upload-time = "2025-07-18T00:57:31.761Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26", size = 31196837, upload-time = "2025-07-18T00:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79", size = 32659470, upload-time = "2025-07-18T00:54:38.329Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb", size = 41055619, upload-time = "2025-07-18T00:54:42.172Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51", size = 42733488, upload-time = "2025-07-18T00:54:47.132Z" }, + { url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a", size = 43329159, upload-time = "2025-07-18T00:54:51.686Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594", size = 45050567, upload-time = "2025-07-18T00:54:56.679Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634", size = 26217959, upload-time = "2025-07-18T00:55:00.482Z" }, + { url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b", size = 31243234, upload-time = "2025-07-18T00:55:03.812Z" }, + { url = "https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10", size = 32714370, upload-time = "2025-07-18T00:55:07.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e", size = 41135424, upload-time = "2025-07-18T00:55:11.461Z" }, + { url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569", size = 42823810, upload-time = "2025-07-18T00:55:16.301Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e", size = 43391538, upload-time = "2025-07-18T00:55:23.82Z" }, + { url = "https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c", size = 45120056, upload-time = "2025-07-18T00:55:28.231Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6", size = 26220568, upload-time = "2025-07-18T00:55:32.122Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd", size = 31160305, upload-time = "2025-07-18T00:55:35.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876", size = 32684264, upload-time = "2025-07-18T00:55:39.303Z" }, + { url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d", size = 41108099, upload-time = "2025-07-18T00:55:42.889Z" }, + { url = "https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e", size = 42829529, upload-time = "2025-07-18T00:55:47.069Z" }, + { url = "https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82", size = 43367883, upload-time = "2025-07-18T00:55:53.069Z" }, + { url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623", size = 45133802, upload-time = "2025-07-18T00:55:57.714Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18", size = 26203175, upload-time = "2025-07-18T00:56:01.364Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a", size = 31154306, upload-time = "2025-07-18T00:56:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe", size = 32680622, upload-time = "2025-07-18T00:56:07.505Z" }, + { url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd", size = 41104094, upload-time = "2025-07-18T00:56:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61", size = 42825576, upload-time = "2025-07-18T00:56:15.569Z" }, + { url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d", size = 43368342, upload-time = "2025-07-18T00:56:19.531Z" }, + { url = "https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99", size = 45131218, upload-time = "2025-07-18T00:56:23.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636", size = 26087551, upload-time = "2025-07-18T00:56:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da", size = 31290064, upload-time = "2025-07-18T00:56:30.214Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7", size = 32727837, upload-time = "2025-07-18T00:56:33.935Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6", size = 41014158, upload-time = "2025-07-18T00:56:37.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8", size = 42667885, upload-time = "2025-07-18T00:56:41.483Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503", size = 43276625, upload-time = "2025-07-18T00:56:48.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79", size = 44951890, upload-time = "2025-07-18T00:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10", size = 26371006, upload-time = "2025-07-18T00:56:56.379Z" }, + { url = "https://files.pythonhosted.org/packages/3e/cc/ce4939f4b316457a083dc5718b3982801e8c33f921b3c98e7a93b7c7491f/pyarrow-21.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a7f6524e3747e35f80744537c78e7302cd41deee8baa668d56d55f77d9c464b3", size = 31211248, upload-time = "2025-07-18T00:56:59.7Z" }, + { url = "https://files.pythonhosted.org/packages/1f/c2/7a860931420d73985e2f340f06516b21740c15b28d24a0e99a900bb27d2b/pyarrow-21.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:203003786c9fd253ebcafa44b03c06983c9c8d06c3145e37f1b76a1f317aeae1", size = 32676896, upload-time = "2025-07-18T00:57:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/197f989b9a75e59b4ca0db6a13c56f19a0ad8a298c68da9cc28145e0bb97/pyarrow-21.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b4d97e297741796fead24867a8dabf86c87e4584ccc03167e4a811f50fdf74d", size = 41067862, upload-time = "2025-07-18T00:57:07.587Z" }, + { url = "https://files.pythonhosted.org/packages/fa/82/6ecfa89487b35aa21accb014b64e0a6b814cc860d5e3170287bf5135c7d8/pyarrow-21.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:898afce396b80fdda05e3086b4256f8677c671f7b1d27a6976fa011d3fd0a86e", size = 42747508, upload-time = "2025-07-18T00:57:13.917Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/b7/ba252f399bbf3addc731e8643c05532cf32e74cebb5e32f8f7409bc243cf/pyarrow-21.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:067c66ca29aaedae08218569a114e413b26e742171f526e828e1064fcdec13f4", size = 43345293, upload-time = "2025-07-18T00:57:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/a20819795bd702b9486f536a8eeb70a6aa64046fce32071c19ec8230dbaa/pyarrow-21.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0c4e75d13eb76295a49e0ea056eb18dbd87d81450bfeb8afa19a7e5a75ae2ad7", size = 45060670, upload-time = "2025-07-18T00:57:24.477Z" }, + { url = "https://files.pythonhosted.org/packages/10/15/6b30e77872012bbfe8265d42a01d5b3c17ef0ac0f2fae531ad91b6a6c02e/pyarrow-21.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdc4c17afda4dab2a9c0b79148a43a7f4e1094916b3e18d8975bfd6d6d52241f", size = 26227521, upload-time = "2025-07-18T00:57:29.119Z" }, ] [[package]] @@ -2039,11 +2243,11 @@ wheels = [ [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] [[package]] @@ -2144,23 +2348,27 @@ wheels = [ [[package]] name = "pywin32" -version = "310" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/da/a5f38fffbba2fb99aa4aa905480ac4b8e83ca486659ac8c95bce47fb5276/pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1", size = 8848240, upload-time = "2025-03-17T00:55:46.783Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fe/d873a773324fa565619ba555a82c9dabd677301720f3660a731a5d07e49a/pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d", size = 9601854, upload-time = "2025-03-17T00:55:48.783Z" }, - { url = "https://files.pythonhosted.org/packages/3c/84/1a8e3d7a15490d28a5d816efa229ecb4999cdc51a7c30dd8914f669093b8/pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213", size = 8522963, upload-time = "2025-03-17T00:55:50.969Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284, upload-time = "2025-03-17T00:55:53.124Z" }, - { url = "https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748, upload-time = "2025-03-17T00:55:55.203Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941, upload-time = "2025-03-17T00:55:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239, upload-time = "2025-03-17T00:55:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839, upload-time = "2025-03-17T00:56:00.8Z" }, - { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470, upload-time = "2025-03-17T00:56:02.601Z" }, - { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384, upload-time = "2025-03-17T00:56:04.383Z" }, - { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039, upload-time = "2025-03-17T00:56:06.207Z" }, - { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = 
"sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152, upload-time = "2025-03-17T00:56:07.819Z" }, - { url = "https://files.pythonhosted.org/packages/a2/cd/d09d434630edb6a0c44ad5079611279a67530296cfe0451e003de7f449ff/pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a", size = 8848099, upload-time = "2025-03-17T00:55:42.415Z" }, - { url = "https://files.pythonhosted.org/packages/93/ff/2a8c10315ffbdee7b3883ac0d1667e267ca8b3f6f640d81d43b87a82c0c7/pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475", size = 9602031, upload-time = "2025-03-17T00:55:44.512Z" }, +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { 
url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = 
"sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/59/42/b86689aac0cdaee7ae1c58d464b0ff04ca909c19bb6502d4973cdd9f9544/pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b", size = 8760837, upload-time = "2025-07-14T20:12:59.59Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8a/1403d0353f8c5a2f0829d2b1c4becbf9da2f0a4d040886404fc4a5431e4d/pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91", size = 9590187, upload-time = "2025-07-14T20:13:01.419Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/22/e0e8d802f124772cec9c75430b01a212f86f9de7546bda715e54140d5aeb/pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d", size = 8778162, upload-time = "2025-07-14T20:13:03.544Z" }, ] [[package]] @@ -2218,162 +2426,184 @@ wheels = [ [[package]] name = "pyzmq" -version = "27.0.0" +version = "27.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "implementation_name == 'pypy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/06/50a4e9648b3e8b992bef8eb632e457307553a89d294103213cfd47b3da69/pyzmq-27.0.0.tar.gz", hash = "sha256:b1f08eeb9ce1510e6939b6e5dcd46a17765e2333daae78ecf4606808442e52cf", size = 280478, upload-time = "2025-06-13T14:09:07.087Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/09/1681d4b047626d352c083770618ac29655ab1f5c20eee31dc94c000b9b7b/pyzmq-27.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:b973ee650e8f442ce482c1d99ca7ab537c69098d53a3d046676a484fd710c87a", size = 1329291, upload-time = "2025-06-13T14:06:57.945Z" }, - { url = "https://files.pythonhosted.org/packages/9d/b2/9c9385225fdd54db9506ed8accbb9ea63ca813ba59d43d7f282a6a16a30b/pyzmq-27.0.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:661942bc7cd0223d569d808f2e5696d9cc120acc73bf3e88a1f1be7ab648a7e4", size = 905952, upload-time = "2025-06-13T14:07:03.232Z" }, - { url = "https://files.pythonhosted.org/packages/41/73/333c72c7ec182cdffe25649e3da1c3b9f3cf1cede63cfdc23d1384d4a601/pyzmq-27.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50360fb2a056ffd16e5f4177eee67f1dd1017332ea53fb095fe7b5bf29c70246", size = 666165, upload-time = "2025-06-13T14:07:04.667Z" }, - { url = 
"https://files.pythonhosted.org/packages/a5/fe/fc7b9c1a50981928e25635a926653cb755364316db59ccd6e79cfb9a0b4f/pyzmq-27.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf209a6dc4b420ed32a7093642843cbf8703ed0a7d86c16c0b98af46762ebefb", size = 853755, upload-time = "2025-06-13T14:07:06.93Z" }, - { url = "https://files.pythonhosted.org/packages/8c/4c/740ed4b6e8fa160cd19dc5abec8db68f440564b2d5b79c1d697d9862a2f7/pyzmq-27.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c2dace4a7041cca2fba5357a2d7c97c5effdf52f63a1ef252cfa496875a3762d", size = 1654868, upload-time = "2025-06-13T14:07:08.224Z" }, - { url = "https://files.pythonhosted.org/packages/97/00/875b2ecfcfc78ab962a59bd384995186818524ea957dc8ad3144611fae12/pyzmq-27.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:63af72b2955fc77caf0a77444baa2431fcabb4370219da38e1a9f8d12aaebe28", size = 2033443, upload-time = "2025-06-13T14:07:09.653Z" }, - { url = "https://files.pythonhosted.org/packages/60/55/6dd9c470c42d713297c5f2a56f7903dc1ebdb4ab2edda996445c21651900/pyzmq-27.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e8c4adce8e37e75c4215297d7745551b8dcfa5f728f23ce09bf4e678a9399413", size = 1891288, upload-time = "2025-06-13T14:07:11.099Z" }, - { url = "https://files.pythonhosted.org/packages/28/5d/54b0ef50d40d7c65a627f4a4b4127024ba9820f2af8acd933a4d30ae192e/pyzmq-27.0.0-cp310-cp310-win32.whl", hash = "sha256:5d5ef4718ecab24f785794e0e7536436698b459bfbc19a1650ef55280119d93b", size = 567936, upload-time = "2025-06-13T14:07:12.468Z" }, - { url = "https://files.pythonhosted.org/packages/18/ea/dedca4321de748ca48d3bcdb72274d4d54e8d84ea49088d3de174bd45d88/pyzmq-27.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:e40609380480b3d12c30f841323f42451c755b8fece84235236f5fe5ffca8c1c", size = 628686, upload-time = "2025-06-13T14:07:14.051Z" }, - { url = 
"https://files.pythonhosted.org/packages/d4/a7/fcdeedc306e71e94ac262cba2d02337d885f5cdb7e8efced8e5ffe327808/pyzmq-27.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:6b0397b0be277b46762956f576e04dc06ced265759e8c2ff41a0ee1aa0064198", size = 559039, upload-time = "2025-06-13T14:07:15.289Z" }, - { url = "https://files.pythonhosted.org/packages/44/df/84c630654106d9bd9339cdb564aa941ed41b023a0264251d6743766bb50e/pyzmq-27.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:21457825249b2a53834fa969c69713f8b5a79583689387a5e7aed880963ac564", size = 1332718, upload-time = "2025-06-13T14:07:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8e/f6a5461a07654d9840d256476434ae0ff08340bba562a455f231969772cb/pyzmq-27.0.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1958947983fef513e6e98eff9cb487b60bf14f588dc0e6bf35fa13751d2c8251", size = 908248, upload-time = "2025-06-13T14:07:18.033Z" }, - { url = "https://files.pythonhosted.org/packages/7c/93/82863e8d695a9a3ae424b63662733ae204a295a2627d52af2f62c2cd8af9/pyzmq-27.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0dc628b5493f9a8cd9844b8bee9732ef587ab00002157c9329e4fc0ef4d3afa", size = 668647, upload-time = "2025-06-13T14:07:19.378Z" }, - { url = "https://files.pythonhosted.org/packages/f3/85/15278769b348121eacdbfcbd8c4d40f1102f32fa6af5be1ffc032ed684be/pyzmq-27.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7bbe9e1ed2c8d3da736a15694d87c12493e54cc9dc9790796f0321794bbc91f", size = 856600, upload-time = "2025-06-13T14:07:20.906Z" }, - { url = "https://files.pythonhosted.org/packages/d4/af/1c469b3d479bd095edb28e27f12eee10b8f00b356acbefa6aeb14dd295d1/pyzmq-27.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dc1091f59143b471d19eb64f54bae4f54bcf2a466ffb66fe45d94d8d734eb495", size = 1657748, upload-time = "2025-06-13T14:07:22.549Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/f4/17f965d0ee6380b1d6326da842a50e4b8b9699745161207945f3745e8cb5/pyzmq-27.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7011ade88c8e535cf140f8d1a59428676fbbce7c6e54fefce58bf117aefb6667", size = 2034311, upload-time = "2025-06-13T14:07:23.966Z" }, - { url = "https://files.pythonhosted.org/packages/e0/6e/7c391d81fa3149fd759de45d298003de6cfab343fb03e92c099821c448db/pyzmq-27.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c386339d7e3f064213aede5d03d054b237937fbca6dd2197ac8cf3b25a6b14e", size = 1893630, upload-time = "2025-06-13T14:07:25.899Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e0/eaffe7a86f60e556399e224229e7769b717f72fec0706b70ab2c03aa04cb/pyzmq-27.0.0-cp311-cp311-win32.whl", hash = "sha256:0546a720c1f407b2172cb04b6b094a78773491497e3644863cf5c96c42df8cff", size = 567706, upload-time = "2025-06-13T14:07:27.595Z" }, - { url = "https://files.pythonhosted.org/packages/c9/05/89354a8cffdcce6e547d48adaaf7be17007fc75572123ff4ca90a4ca04fc/pyzmq-27.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:15f39d50bd6c9091c67315ceb878a4f531957b121d2a05ebd077eb35ddc5efed", size = 630322, upload-time = "2025-06-13T14:07:28.938Z" }, - { url = "https://files.pythonhosted.org/packages/fa/07/4ab976d5e1e63976719389cc4f3bfd248a7f5f2bb2ebe727542363c61b5f/pyzmq-27.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c5817641eebb391a2268c27fecd4162448e03538387093cdbd8bf3510c316b38", size = 558435, upload-time = "2025-06-13T14:07:30.256Z" }, - { url = "https://files.pythonhosted.org/packages/93/a7/9ad68f55b8834ede477842214feba6a4c786d936c022a67625497aacf61d/pyzmq-27.0.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:cbabc59dcfaac66655c040dfcb8118f133fb5dde185e5fc152628354c1598e52", size = 1305438, upload-time = "2025-06-13T14:07:31.676Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/ee/26aa0f98665a22bc90ebe12dced1de5f3eaca05363b717f6fb229b3421b3/pyzmq-27.0.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:cb0ac5179cba4b2f94f1aa208fbb77b62c4c9bf24dd446278b8b602cf85fcda3", size = 895095, upload-time = "2025-06-13T14:07:33.104Z" }, - { url = "https://files.pythonhosted.org/packages/cf/85/c57e7ab216ecd8aa4cc7e3b83b06cc4e9cf45c87b0afc095f10cd5ce87c1/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53a48f0228eab6cbf69fde3aa3c03cbe04e50e623ef92ae395fce47ef8a76152", size = 651826, upload-time = "2025-06-13T14:07:34.831Z" }, - { url = "https://files.pythonhosted.org/packages/69/9a/9ea7e230feda9400fb0ae0d61d7d6ddda635e718d941c44eeab22a179d34/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:111db5f395e09f7e775f759d598f43cb815fc58e0147623c4816486e1a39dc22", size = 839750, upload-time = "2025-06-13T14:07:36.553Z" }, - { url = "https://files.pythonhosted.org/packages/08/66/4cebfbe71f3dfbd417011daca267539f62ed0fbc68105357b68bbb1a25b7/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c8878011653dcdc27cc2c57e04ff96f0471e797f5c19ac3d7813a245bcb24371", size = 1641357, upload-time = "2025-06-13T14:07:38.21Z" }, - { url = "https://files.pythonhosted.org/packages/ac/f6/b0f62578c08d2471c791287149cb8c2aaea414ae98c6e995c7dbe008adfb/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:c0ed2c1f335ba55b5fdc964622254917d6b782311c50e138863eda409fbb3b6d", size = 2020281, upload-time = "2025-06-13T14:07:39.599Z" }, - { url = "https://files.pythonhosted.org/packages/37/b9/4f670b15c7498495da9159edc374ec09c88a86d9cd5a47d892f69df23450/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e918d70862d4cfd4b1c187310015646a14e1f5917922ab45b29f28f345eeb6be", size = 1877110, upload-time = "2025-06-13T14:07:41.027Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/31/9dee25c226295b740609f0d46db2fe972b23b6f5cf786360980524a3ba92/pyzmq-27.0.0-cp312-abi3-win32.whl", hash = "sha256:88b4e43cab04c3c0f0d55df3b1eef62df2b629a1a369b5289a58f6fa8b07c4f4", size = 559297, upload-time = "2025-06-13T14:07:42.533Z" }, - { url = "https://files.pythonhosted.org/packages/9b/12/52da5509800f7ff2d287b2f2b4e636e7ea0f001181cba6964ff6c1537778/pyzmq-27.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:dce4199bf5f648a902ce37e7b3afa286f305cd2ef7a8b6ec907470ccb6c8b371", size = 619203, upload-time = "2025-06-13T14:07:43.843Z" }, - { url = "https://files.pythonhosted.org/packages/93/6d/7f2e53b19d1edb1eb4f09ec7c3a1f945ca0aac272099eab757d15699202b/pyzmq-27.0.0-cp312-abi3-win_arm64.whl", hash = "sha256:56e46bbb85d52c1072b3f809cc1ce77251d560bc036d3a312b96db1afe76db2e", size = 551927, upload-time = "2025-06-13T14:07:45.51Z" }, - { url = "https://files.pythonhosted.org/packages/19/62/876b27c4ff777db4ceba1c69ea90d3c825bb4f8d5e7cd987ce5802e33c55/pyzmq-27.0.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c36ad534c0c29b4afa088dc53543c525b23c0797e01b69fef59b1a9c0e38b688", size = 1340826, upload-time = "2025-06-13T14:07:46.881Z" }, - { url = "https://files.pythonhosted.org/packages/43/69/58ef8f4f59d3bcd505260c73bee87b008850f45edca40ddaba54273c35f4/pyzmq-27.0.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:67855c14173aec36395d7777aaba3cc527b393821f30143fd20b98e1ff31fd38", size = 897283, upload-time = "2025-06-13T14:07:49.562Z" }, - { url = "https://files.pythonhosted.org/packages/43/15/93a0d0396700a60475ad3c5d42c5f1c308d3570bc94626b86c71ef9953e0/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8617c7d43cd8ccdb62aebe984bfed77ca8f036e6c3e46dd3dddda64b10f0ab7a", size = 660567, upload-time = "2025-06-13T14:07:51.364Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/b3/fe055513e498ca32f64509abae19b9c9eb4d7c829e02bd8997dd51b029eb/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:67bfbcbd0a04c575e8103a6061d03e393d9f80ffdb9beb3189261e9e9bc5d5e9", size = 847681, upload-time = "2025-06-13T14:07:52.77Z" }, - { url = "https://files.pythonhosted.org/packages/b6/4f/ff15300b00b5b602191f3df06bbc8dd4164e805fdd65bb77ffbb9c5facdc/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5cd11d46d7b7e5958121b3eaf4cd8638eff3a720ec527692132f05a57f14341d", size = 1650148, upload-time = "2025-06-13T14:07:54.178Z" }, - { url = "https://files.pythonhosted.org/packages/c4/6f/84bdfff2a224a6f26a24249a342e5906993c50b0761e311e81b39aef52a7/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:b801c2e40c5aa6072c2f4876de8dccd100af6d9918d4d0d7aa54a1d982fd4f44", size = 2023768, upload-time = "2025-06-13T14:07:55.714Z" }, - { url = "https://files.pythonhosted.org/packages/64/39/dc2db178c26a42228c5ac94a9cc595030458aa64c8d796a7727947afbf55/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:20d5cb29e8c5f76a127c75b6e7a77e846bc4b655c373baa098c26a61b7ecd0ef", size = 1885199, upload-time = "2025-06-13T14:07:57.166Z" }, - { url = "https://files.pythonhosted.org/packages/c7/21/dae7b06a1f8cdee5d8e7a63d99c5d129c401acc40410bef2cbf42025e26f/pyzmq-27.0.0-cp313-cp313t-win32.whl", hash = "sha256:a20528da85c7ac7a19b7384e8c3f8fa707841fd85afc4ed56eda59d93e3d98ad", size = 575439, upload-time = "2025-06-13T14:07:58.959Z" }, - { url = "https://files.pythonhosted.org/packages/eb/bc/1709dc55f0970cf4cb8259e435e6773f9946f41a045c2cb90e870b7072da/pyzmq-27.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d8229f2efece6a660ee211d74d91dbc2a76b95544d46c74c615e491900dc107f", size = 639933, upload-time = "2025-06-13T14:08:00.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/dc/95210fe17e5d7dba89bd663e1d88f50a8003f296284731b09f1d95309a42/pyzmq-27.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:100f6e5052ba42b2533011d34a018a5ace34f8cac67cb03cfa37c8bdae0ca617", size = 1330656, upload-time = "2025-06-13T14:08:17.414Z" }, - { url = "https://files.pythonhosted.org/packages/d3/7e/63f742b578316258e03ecb393d35c0964348d80834bdec8a100ed7bb9c91/pyzmq-27.0.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:bf6c6b061efd00404b9750e2cfbd9507492c8d4b3721ded76cb03786131be2ed", size = 906522, upload-time = "2025-06-13T14:08:18.945Z" }, - { url = "https://files.pythonhosted.org/packages/1f/bf/f0b2b67f5a9bfe0fbd0e978a2becd901f802306aa8e29161cb0963094352/pyzmq-27.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ee05728c0b0b2484a9fc20466fa776fffb65d95f7317a3419985b8c908563861", size = 863545, upload-time = "2025-06-13T14:08:20.386Z" }, - { url = "https://files.pythonhosted.org/packages/87/0e/7d90ccd2ef577c8bae7f926acd2011a6d960eea8a068c5fd52b419206960/pyzmq-27.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7cdf07fe0a557b131366f80727ec8ccc4b70d89f1e3f920d94a594d598d754f0", size = 666796, upload-time = "2025-06-13T14:08:21.836Z" }, - { url = "https://files.pythonhosted.org/packages/4f/6d/ca8007a313baa73361778773aef210f4902e68f468d1f93b6c8b908fabbd/pyzmq-27.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:90252fa2ff3a104219db1f5ced7032a7b5fc82d7c8d2fec2b9a3e6fd4e25576b", size = 1655599, upload-time = "2025-06-13T14:08:23.343Z" }, - { url = "https://files.pythonhosted.org/packages/46/de/5cb4f99d6c0dd8f33d729c9ebd49af279586e5ab127e93aa6ef0ecd08c4c/pyzmq-27.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ea6d441c513bf18c578c73c323acf7b4184507fc244762193aa3a871333c9045", size = 2034119, upload-time = "2025-06-13T14:08:26.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/d0/8d/57cc90c8b5f30a97a7e86ec91a3b9822ec7859d477e9c30f531fb78f4a97/pyzmq-27.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ae2b34bcfaae20c064948a4113bf8709eee89fd08317eb293ae4ebd69b4d9740", size = 1891955, upload-time = "2025-06-13T14:08:28.39Z" }, - { url = "https://files.pythonhosted.org/packages/24/f5/a7012022573188903802ab75b5314b00e5c629228f3a36fadb421a42ebff/pyzmq-27.0.0-cp39-cp39-win32.whl", hash = "sha256:5b10bd6f008937705cf6e7bf8b6ece5ca055991e3eb130bca8023e20b86aa9a3", size = 568497, upload-time = "2025-06-13T14:08:30.089Z" }, - { url = "https://files.pythonhosted.org/packages/9b/f3/2a4b2798275a574801221d94d599ed3e26d19f6378a7364cdfa3bee53944/pyzmq-27.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:00387d12a8af4b24883895f7e6b9495dc20a66027b696536edac35cb988c38f3", size = 629315, upload-time = "2025-06-13T14:08:31.877Z" }, - { url = "https://files.pythonhosted.org/packages/da/eb/386a70314f305816142d6e8537f5557e5fd9614c03698d6c88cbd6c41190/pyzmq-27.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:4c19d39c04c29a6619adfeb19e3735c421b3bfee082f320662f52e59c47202ba", size = 559596, upload-time = "2025-06-13T14:08:33.357Z" }, - { url = "https://files.pythonhosted.org/packages/09/6f/be6523a7f3821c0b5370912ef02822c028611360e0d206dd945bdbf9eaef/pyzmq-27.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:656c1866505a5735d0660b7da6d7147174bbf59d4975fc2b7f09f43c9bc25745", size = 835950, upload-time = "2025-06-13T14:08:35Z" }, - { url = "https://files.pythonhosted.org/packages/c6/1e/a50fdd5c15018de07ab82a61bc460841be967ee7bbe7abee3b714d66f7ac/pyzmq-27.0.0-pp310-pypy310_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74175b9e12779382432dd1d1f5960ebe7465d36649b98a06c6b26be24d173fab", size = 799876, upload-time = "2025-06-13T14:08:36.849Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/a1/89eb5b71f5a504f8f887aceb8e1eb3626e00c00aa8085381cdff475440dc/pyzmq-27.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8c6de908465697a8708e4d6843a1e884f567962fc61eb1706856545141d0cbb", size = 567400, upload-time = "2025-06-13T14:08:38.95Z" }, - { url = "https://files.pythonhosted.org/packages/56/aa/4571dbcff56cfb034bac73fde8294e123c975ce3eea89aff31bf6dc6382b/pyzmq-27.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c644aaacc01d0df5c7072826df45e67301f191c55f68d7b2916d83a9ddc1b551", size = 747031, upload-time = "2025-06-13T14:08:40.413Z" }, - { url = "https://files.pythonhosted.org/packages/46/e0/d25f30fe0991293c5b2f5ef3b070d35fa6d57c0c7428898c3ab4913d0297/pyzmq-27.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:10f70c1d9a446a85013a36871a296007f6fe4232b530aa254baf9da3f8328bc0", size = 544726, upload-time = "2025-06-13T14:08:41.997Z" }, - { url = "https://files.pythonhosted.org/packages/98/a6/92394373b8dbc1edc9d53c951e8d3989d518185174ee54492ec27711779d/pyzmq-27.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd1dc59763effd1576f8368047c9c31468fce0af89d76b5067641137506792ae", size = 835948, upload-time = "2025-06-13T14:08:43.516Z" }, - { url = "https://files.pythonhosted.org/packages/56/f3/4dc38d75d9995bfc18773df3e41f2a2ca9b740b06f1a15dbf404077e7588/pyzmq-27.0.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:60e8cc82d968174650c1860d7b716366caab9973787a1c060cf8043130f7d0f7", size = 799874, upload-time = "2025-06-13T14:08:45.017Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ba/64af397e0f421453dc68e31d5e0784d554bf39013a2de0872056e96e58af/pyzmq-27.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14fe7aaac86e4e93ea779a821967360c781d7ac5115b3f1a171ced77065a0174", size = 567400, upload-time = "2025-06-13T14:08:46.855Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/87/ec956cbe98809270b59a22891d5758edae147a258e658bf3024a8254c855/pyzmq-27.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6ad0562d4e6abb785be3e4dd68599c41be821b521da38c402bc9ab2a8e7ebc7e", size = 747031, upload-time = "2025-06-13T14:08:48.419Z" }, - { url = "https://files.pythonhosted.org/packages/be/8a/4a3764a68abc02e2fbb0668d225b6fda5cd39586dd099cee8b2ed6ab0452/pyzmq-27.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:9df43a2459cd3a3563404c1456b2c4c69564daa7dbaf15724c09821a3329ce46", size = 544726, upload-time = "2025-06-13T14:08:49.903Z" }, - { url = "https://files.pythonhosted.org/packages/03/f6/11b2a6c8cd13275c31cddc3f89981a1b799a3c41dec55289fa18dede96b5/pyzmq-27.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:39ddd3ba0a641f01d8f13a3cfd4c4924eb58e660d8afe87e9061d6e8ca6f7ac3", size = 835944, upload-time = "2025-06-13T14:08:59.189Z" }, - { url = "https://files.pythonhosted.org/packages/73/34/aa39076f4e07ae1912fa4b966fe24e831e01d736d4c1c7e8a3aa28a555b5/pyzmq-27.0.0-pp39-pypy39_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:8ca7e6a0388dd9e1180b14728051068f4efe83e0d2de058b5ff92c63f399a73f", size = 799869, upload-time = "2025-06-13T14:09:00.758Z" }, - { url = "https://files.pythonhosted.org/packages/65/f3/81ed6b3dd242408ee79c0d8a88734644acf208baee8666ecd7e52664cf55/pyzmq-27.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2524c40891be6a3106885a3935d58452dd83eb7a5742a33cc780a1ad4c49dec0", size = 758371, upload-time = "2025-06-13T14:09:02.461Z" }, - { url = "https://files.pythonhosted.org/packages/e1/04/dac4ca674764281caf744e8adefd88f7e325e1605aba0f9a322094b903fa/pyzmq-27.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a56e3e5bd2d62a01744fd2f1ce21d760c7c65f030e9522738d75932a14ab62a", size = 567393, upload-time = "2025-06-13T14:09:04.037Z" }, - { url = 
"https://files.pythonhosted.org/packages/51/8b/619a9ee2fa4d3c724fbadde946427735ade64da03894b071bbdc3b789d83/pyzmq-27.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:096af9e133fec3a72108ddefba1e42985cb3639e9de52cfd336b6fc23aa083e9", size = 544715, upload-time = "2025-06-13T14:09:05.579Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/30/5f/557d2032a2f471edbcc227da724c24a1c05887b5cda1e3ae53af98b9e0a5/pyzmq-27.0.1.tar.gz", hash = "sha256:45c549204bc20e7484ffd2555f6cf02e572440ecf2f3bdd60d4404b20fddf64b", size = 281158, upload-time = "2025-08-03T05:05:40.352Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/0b/ccf4d0b152a6a11f0fc01e73978202fe0e8fe0e91e20941598e83a170bee/pyzmq-27.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:90a4da42aa322de8a3522461e3b5fe999935763b27f69a02fced40f4e3cf9682", size = 1329293, upload-time = "2025-08-03T05:02:56.001Z" }, + { url = "https://files.pythonhosted.org/packages/bc/76/48706d291951b1300d3cf985e503806901164bf1581f27c4b6b22dbab2fa/pyzmq-27.0.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e648dca28178fc879c814cf285048dd22fd1f03e1104101106505ec0eea50a4d", size = 905953, upload-time = "2025-08-03T05:02:59.061Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8a/df3135b96712068d184c53120c7dbf3023e5e362a113059a4f85cd36c6a0/pyzmq-27.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bca8abc31799a6f3652d13f47e0b0e1cab76f9125f2283d085a3754f669b607", size = 666165, upload-time = "2025-08-03T05:03:00.789Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ed/341a7148e08d2830f480f53ab3d136d88fc5011bb367b516d95d0ebb46dd/pyzmq-27.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:092f4011b26d6b0201002f439bd74b38f23f3aefcb358621bdc3b230afc9b2d5", size = 853756, upload-time = "2025-08-03T05:03:03.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/bc/d26fe010477c3e901f0f5a3e70446950dde9aa217f1d1a13534eb0fccfe5/pyzmq-27.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f02f30a4a6b3efe665ab13a3dd47109d80326c8fd286311d1ba9f397dc5f247", size = 1654870, upload-time = "2025-08-03T05:03:05.331Z" }, + { url = "https://files.pythonhosted.org/packages/32/21/9b488086bf3f55b2eb26db09007a3962f62f3b81c5c6295a6ff6aaebd69c/pyzmq-27.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f293a1419266e3bf3557d1f8778f9e1ffe7e6b2c8df5c9dca191caf60831eb74", size = 2033444, upload-time = "2025-08-03T05:03:07.318Z" }, + { url = "https://files.pythonhosted.org/packages/3d/53/85b64a792223cd43393d25e03c8609df41aac817ea5ce6a27eceeed433ee/pyzmq-27.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ce181dd1a7c6c012d0efa8ab603c34b5ee9d86e570c03415bbb1b8772eeb381c", size = 1891289, upload-time = "2025-08-03T05:03:08.96Z" }, + { url = "https://files.pythonhosted.org/packages/23/5b/078aae8fe1c4cdba1a77a598870c548fd52b4d4a11e86b8116bbef47d9f3/pyzmq-27.0.1-cp310-cp310-win32.whl", hash = "sha256:f65741cc06630652e82aa68ddef4986a3ab9073dd46d59f94ce5f005fa72037c", size = 566693, upload-time = "2025-08-03T05:03:10.711Z" }, + { url = "https://files.pythonhosted.org/packages/24/e1/4471fff36416ebf1ffe43577b9c7dcf2ff4798f2171f0d169640a48d2305/pyzmq-27.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:44909aa3ed2234d69fe81e1dade7be336bcfeab106e16bdaa3318dcde4262b93", size = 631649, upload-time = "2025-08-03T05:03:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/e8/4c/8edac8dd56f223124aa40403d2c097bbad9b0e2868a67cad9a2a029863aa/pyzmq-27.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:4401649bfa0a38f0f8777f8faba7cd7eb7b5b8ae2abc7542b830dd09ad4aed0d", size = 559274, upload-time = "2025-08-03T05:03:13.728Z" }, + { url = "https://files.pythonhosted.org/packages/ae/18/a8e0da6ababbe9326116fb1c890bf1920eea880e8da621afb6bc0f39a262/pyzmq-27.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = 
"sha256:9729190bd770314f5fbba42476abf6abe79a746eeda11d1d68fd56dd70e5c296", size = 1332721, upload-time = "2025-08-03T05:03:15.237Z" }, + { url = "https://files.pythonhosted.org/packages/75/a4/9431ba598651d60ebd50dc25755402b770322cf8432adcc07d2906e53a54/pyzmq-27.0.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:696900ef6bc20bef6a242973943574f96c3f97d2183c1bd3da5eea4f559631b1", size = 908249, upload-time = "2025-08-03T05:03:16.933Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/e624e1793689e4e685d2ee21c40277dd4024d9d730af20446d88f69be838/pyzmq-27.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f96a63aecec22d3f7fdea3c6c98df9e42973f5856bb6812c3d8d78c262fee808", size = 668649, upload-time = "2025-08-03T05:03:18.49Z" }, + { url = "https://files.pythonhosted.org/packages/6c/29/0652a39d4e876e0d61379047ecf7752685414ad2e253434348246f7a2a39/pyzmq-27.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c512824360ea7490390566ce00bee880e19b526b312b25cc0bc30a0fe95cb67f", size = 856601, upload-time = "2025-08-03T05:03:20.194Z" }, + { url = "https://files.pythonhosted.org/packages/36/2d/8d5355d7fc55bb6e9c581dd74f58b64fa78c994079e3a0ea09b1b5627cde/pyzmq-27.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dfb2bb5e0f7198eaacfb6796fb0330afd28f36d985a770745fba554a5903595a", size = 1657750, upload-time = "2025-08-03T05:03:22.055Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f4/cd032352d5d252dc6f5ee272a34b59718ba3af1639a8a4ef4654f9535cf5/pyzmq-27.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f6886c59ba93ffde09b957d3e857e7950c8fe818bd5494d9b4287bc6d5bc7f1", size = 2034312, upload-time = "2025-08-03T05:03:23.578Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1a/c050d8b6597200e97a4bd29b93c769d002fa0b03083858227e0376ad59bc/pyzmq-27.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b99ea9d330e86ce1ff7f2456b33f1bf81c43862a5590faf4ef4ed3a63504bdab", size = 1893632, upload-time = "2025-08-03T05:03:25.167Z" }, + { url = "https://files.pythonhosted.org/packages/6a/29/173ce21d5097e7fcf284a090e8beb64fc683c6582b1f00fa52b1b7e867ce/pyzmq-27.0.1-cp311-cp311-win32.whl", hash = "sha256:571f762aed89025ba8cdcbe355fea56889715ec06d0264fd8b6a3f3fa38154ed", size = 566587, upload-time = "2025-08-03T05:03:26.769Z" }, + { url = "https://files.pythonhosted.org/packages/53/ab/22bd33e7086f0a2cc03a5adabff4bde414288bb62a21a7820951ef86ec20/pyzmq-27.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee16906c8025fa464bea1e48128c048d02359fb40bebe5333103228528506530", size = 632873, upload-time = "2025-08-03T05:03:28.685Z" }, + { url = "https://files.pythonhosted.org/packages/90/14/3e59b4a28194285ceeff725eba9aa5ba8568d1cb78aed381dec1537c705a/pyzmq-27.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:ba068f28028849da725ff9185c24f832ccf9207a40f9b28ac46ab7c04994bd41", size = 558918, upload-time = "2025-08-03T05:03:30.085Z" }, + { url = "https://files.pythonhosted.org/packages/0e/9b/c0957041067c7724b310f22c398be46399297c12ed834c3bc42200a2756f/pyzmq-27.0.1-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:af7ebce2a1e7caf30c0bb64a845f63a69e76a2fadbc1cac47178f7bb6e657bdd", size = 1305432, upload-time = "2025-08-03T05:03:32.177Z" }, + { url = "https://files.pythonhosted.org/packages/8e/55/bd3a312790858f16b7def3897a0c3eb1804e974711bf7b9dcb5f47e7f82c/pyzmq-27.0.1-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:8f617f60a8b609a13099b313e7e525e67f84ef4524b6acad396d9ff153f6e4cd", size = 895095, upload-time = "2025-08-03T05:03:33.918Z" }, + { url = "https://files.pythonhosted.org/packages/20/50/fc384631d8282809fb1029a4460d2fe90fa0370a0e866a8318ed75c8d3bb/pyzmq-27.0.1-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d59dad4173dc2a111f03e59315c7bd6e73da1a9d20a84a25cf08325b0582b1a", size = 651826, upload-time = 
"2025-08-03T05:03:35.818Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0a/2356305c423a975000867de56888b79e44ec2192c690ff93c3109fd78081/pyzmq-27.0.1-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f5b6133c8d313bde8bd0d123c169d22525300ff164c2189f849de495e1344577", size = 839751, upload-time = "2025-08-03T05:03:37.265Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1b/81e95ad256ca7e7ccd47f5294c1c6da6e2b64fbace65b84fe8a41470342e/pyzmq-27.0.1-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:58cca552567423f04d06a075f4b473e78ab5bdb906febe56bf4797633f54aa4e", size = 1641359, upload-time = "2025-08-03T05:03:38.799Z" }, + { url = "https://files.pythonhosted.org/packages/50/63/9f50ec965285f4e92c265c8f18344e46b12803666d8b73b65d254d441435/pyzmq-27.0.1-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:4b9d8e26fb600d0d69cc9933e20af08552e97cc868a183d38a5c0d661e40dfbb", size = 2020281, upload-time = "2025-08-03T05:03:40.338Z" }, + { url = "https://files.pythonhosted.org/packages/02/4a/19e3398d0dc66ad2b463e4afa1fc541d697d7bc090305f9dfb948d3dfa29/pyzmq-27.0.1-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2329f0c87f0466dce45bba32b63f47018dda5ca40a0085cc5c8558fea7d9fc55", size = 1877112, upload-time = "2025-08-03T05:03:42.012Z" }, + { url = "https://files.pythonhosted.org/packages/bf/42/c562e9151aa90ed1d70aac381ea22a929d6b3a2ce4e1d6e2e135d34fd9c6/pyzmq-27.0.1-cp312-abi3-win32.whl", hash = "sha256:57bb92abdb48467b89c2d21da1ab01a07d0745e536d62afd2e30d5acbd0092eb", size = 558177, upload-time = "2025-08-03T05:03:43.979Z" }, + { url = "https://files.pythonhosted.org/packages/40/96/5c50a7d2d2b05b19994bf7336b97db254299353dd9b49b565bb71b485f03/pyzmq-27.0.1-cp312-abi3-win_amd64.whl", hash = "sha256:ff3f8757570e45da7a5bedaa140489846510014f7a9d5ee9301c61f3f1b8a686", size = 618923, upload-time = "2025-08-03T05:03:45.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/33/1ec89c8f21c89d21a2eaff7def3676e21d8248d2675705e72554fb5a6f3f/pyzmq-27.0.1-cp312-abi3-win_arm64.whl", hash = "sha256:df2c55c958d3766bdb3e9d858b911288acec09a9aab15883f384fc7180df5bed", size = 552358, upload-time = "2025-08-03T05:03:46.887Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a0/f26e276211ec8090a4d11e4ec70eb8a8b15781e591c1d44ce62f372963a0/pyzmq-27.0.1-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:497bd8af534ae55dc4ef67eebd1c149ff2a0b0f1e146db73c8b5a53d83c1a5f5", size = 1122287, upload-time = "2025-08-03T05:03:48.838Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d8/af4b507e4f7eeea478cc8ee873995a6fd55582bfb99140593ed460e1db3c/pyzmq-27.0.1-cp313-cp313-android_24_x86_64.whl", hash = "sha256:a066ea6ad6218b4c233906adf0ae67830f451ed238419c0db609310dd781fbe7", size = 1155756, upload-time = "2025-08-03T05:03:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/ac/55/37fae0013e11f88681da42698e550b08a316d608242551f65095cc99232a/pyzmq-27.0.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:72d235d6365ca73d8ce92f7425065d70f5c1e19baa458eb3f0d570e425b73a96", size = 1340826, upload-time = "2025-08-03T05:03:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e4/3a87854c64b26fcf63a9d1b6f4382bd727d4797c772ceb334a97b7489be9/pyzmq-27.0.1-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:313a7b374e3dc64848644ca348a51004b41726f768b02e17e689f1322366a4d9", size = 897283, upload-time = "2025-08-03T05:03:54.167Z" }, + { url = "https://files.pythonhosted.org/packages/17/3e/4296c6b0ad2d07be11ae1395dccf9cae48a0a655cf9be1c3733ad2b591d1/pyzmq-27.0.1-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:119ce8590409702394f959c159d048002cbed2f3c0645ec9d6a88087fc70f0f1", size = 660565, upload-time = "2025-08-03T05:03:56.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/41/a33ba3aa48b45b23c4cd4ac49aafde46f3e0f81939f2bfb3b6171a437122/pyzmq-27.0.1-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:45c3e00ce16896ace2cd770ab9057a7cf97d4613ea5f2a13f815141d8b6894b9", size = 847680, upload-time = "2025-08-03T05:03:57.696Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/bf2350bb25b3b58d2e5b5d2290ffab0e923f0cc6d02288d3fbf4baa6e4d1/pyzmq-27.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:678e50ec112bdc6df5a83ac259a55a4ba97a8b314c325ab26b3b5b071151bc61", size = 1650151, upload-time = "2025-08-03T05:03:59.387Z" }, + { url = "https://files.pythonhosted.org/packages/f7/1a/a5a07c54890891344a8ddc3d5ab320dd3c4e39febb6e4472546e456d5157/pyzmq-27.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d0b96c30be9f9387b18b18b6133c75a7b1b0065da64e150fe1feb5ebf31ece1c", size = 2023766, upload-time = "2025-08-03T05:04:01.883Z" }, + { url = "https://files.pythonhosted.org/packages/62/5e/514dcff08f02c6c8a45a6e23621901139cf853be7ac5ccd0b9407c3aa3de/pyzmq-27.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88dc92d9eb5ea4968123e74db146d770b0c8d48f0e2bfb1dbc6c50a8edb12d64", size = 1885195, upload-time = "2025-08-03T05:04:03.923Z" }, + { url = "https://files.pythonhosted.org/packages/c8/91/87f74f98a487fbef0b115f6025e4a295129fd56b2b633a03ba7d5816ecc2/pyzmq-27.0.1-cp313-cp313t-win32.whl", hash = "sha256:6dcbcb34f5c9b0cefdfc71ff745459241b7d3cda5b27c7ad69d45afc0821d1e1", size = 574213, upload-time = "2025-08-03T05:04:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/07f7d0d7f4c81e08be7b60e52ff2591c557377c017f96204d33d5fca1b07/pyzmq-27.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9fd0fda730461f510cfd9a40fafa5355d65f5e3dbdd8d6dfa342b5b3f5d1949", size = 640202, upload-time = "2025-08-03T05:04:07.439Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/83/21d66bcef6fb803647a223cbde95111b099e2176277c0cbc8b099c485510/pyzmq-27.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:56a3b1853f3954ec1f0e91085f1350cc57d18f11205e4ab6e83e4b7c414120e0", size = 561514, upload-time = "2025-08-03T05:04:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0b/d5ea75cf46b52cdce85a85200c963cb498932953df443892238be49b1a01/pyzmq-27.0.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:f98f6b7787bd2beb1f0dde03f23a0621a0c978edf673b7d8f5e7bc039cbe1b60", size = 1340836, upload-time = "2025-08-03T05:04:10.774Z" }, + { url = "https://files.pythonhosted.org/packages/be/4c/0dbce882550e17db6846b29e9dc242aea7590e7594e1ca5043e8e58fff2d/pyzmq-27.0.1-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:351bf5d8ca0788ca85327fda45843b6927593ff4c807faee368cc5aaf9f809c2", size = 897236, upload-time = "2025-08-03T05:04:13.221Z" }, + { url = "https://files.pythonhosted.org/packages/1b/22/461e131cf16b8814f3c356fa1ea0912697dbc4c64cddf01f7756ec704c1e/pyzmq-27.0.1-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5268a5a9177afff53dc6d70dffe63114ba2a6e7b20d9411cc3adeba09eeda403", size = 660374, upload-time = "2025-08-03T05:04:15.032Z" }, + { url = "https://files.pythonhosted.org/packages/3f/0c/bbd65a814395bf4fc3e57c6c13af27601c07e4009bdfb75ebcf500537bbd/pyzmq-27.0.1-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a4aca06ba295aa78bec9b33ec028d1ca08744c36294338c41432b7171060c808", size = 847497, upload-time = "2025-08-03T05:04:16.967Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/3d1f4a03b561d824cbd491394f67591957e2f1acf6dc85d96f970312a76a/pyzmq-27.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1c363c6dc66352331d5ad64bb838765c6692766334a6a02fdb05e76bd408ae18", size = 1650028, upload-time = "2025-08-03T05:04:19.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/c9/a3987540f59a412bdaae3f362f78e00e6769557a598c63b7e32956aade5a/pyzmq-27.0.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:87aebf4acd7249bdff8d3df03aed4f09e67078e6762cfe0aecf8d0748ff94cde", size = 2023808, upload-time = "2025-08-03T05:04:21.145Z" }, + { url = "https://files.pythonhosted.org/packages/b0/a5/c388f4cd80498a8eaef7535f2a8eaca0a35b82b87a0b47fa1856fc135004/pyzmq-27.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e4f22d67756518d71901edf73b38dc0eb4765cce22c8fe122cc81748d425262b", size = 1884970, upload-time = "2025-08-03T05:04:22.908Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ac/b2a89a1ed90526a1b9a260cdc5cd42f055fd44ee8d2a59902b5ac35ddeb1/pyzmq-27.0.1-cp314-cp314t-win32.whl", hash = "sha256:8c62297bc7aea2147b472ca5ca2b4389377ad82898c87cabab2a94aedd75e337", size = 586905, upload-time = "2025-08-03T05:04:24.492Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/7aa5ea04e836f7a788b2a67405f83011cef59ca76d7bac91d1fc9a0476da/pyzmq-27.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:bee5248d5ec9223545f8cc4f368c2d571477ae828c99409125c3911511d98245", size = 660503, upload-time = "2025-08-03T05:04:26.382Z" }, + { url = "https://files.pythonhosted.org/packages/89/32/3836ed85947b06f1d67c07ce16c00b0cf8c053ab0b249d234f9f81ff95ff/pyzmq-27.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:0fc24bf45e4a454e55ef99d7f5c8b8712539200ce98533af25a5bfa954b6b390", size = 575098, upload-time = "2025-08-03T05:04:27.974Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f1/cdceaf9b6637570f36eee2dbd25bc5a800637cd9b4103b15fbc4b0658b82/pyzmq-27.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:05a94233fdde585eb70924a6e4929202a747eea6ed308a6171c4f1c715bbe39e", size = 1330651, upload-time = "2025-08-03T05:04:45.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/5c/469d3b9315eb4d5c61c431a4ae8acdb6abb165dfa5ddbc7af639be53891c/pyzmq-27.0.1-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c96702e1082eab62ae583d64c4e19c9b848359196697e536a0c57ae9bd165bd5", size = 906524, upload-time = "2025-08-03T05:04:47.904Z" }, + { url = "https://files.pythonhosted.org/packages/ed/c0/c7a12a533a87beb1143f4a9c8f4d6f82775c04eb3ad27f664e0ef00a6189/pyzmq-27.0.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c9180d1f5b4b73e28b64e63cc6c4c097690f102aa14935a62d5dd7426a4e5b5a", size = 863547, upload-time = "2025-08-03T05:04:49.579Z" }, + { url = "https://files.pythonhosted.org/packages/41/78/50907d004511bd23eae03d951f3ca4e4cc2e7eb5ec8d3df70d89eca3f97c/pyzmq-27.0.1-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e971d8680003d0af6020713e52f92109b46fedb463916e988814e04c8133578a", size = 666797, upload-time = "2025-08-03T05:04:51.263Z" }, + { url = "https://files.pythonhosted.org/packages/67/bd/ec3388888eda39705a4cefb465452a4bca5430a3435803588ced49943fdb/pyzmq-27.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe632fa4501154d58dfbe1764a0495734d55f84eaf1feda4549a1f1ca76659e9", size = 1655601, upload-time = "2025-08-03T05:04:53.026Z" }, + { url = "https://files.pythonhosted.org/packages/84/50/170a1671a171365dda677886d42c39629a086752696ede70296b8f6224d8/pyzmq-27.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4c3874344fd5fa6d58bb51919708048ac4cab21099f40a227173cddb76b4c20b", size = 2034120, upload-time = "2025-08-03T05:04:55.323Z" }, + { url = "https://files.pythonhosted.org/packages/a4/0a/f06841495e4ec33ed65588e94aff07f1dcbc6878e1611577f6b97a449068/pyzmq-27.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ec09073ed67ae236785d543df3b322282acc0bdf6d1b748c3e81f3043b21cb5", size = 1891956, upload-time = "2025-08-03T05:04:57.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/6b/6ba945a4756e4b1ba69b909d2b040d16aff0f0edd56a60874970b8d47237/pyzmq-27.0.1-cp39-cp39-win32.whl", hash = "sha256:f44e7ea288d022d4bf93b9e79dafcb4a7aea45a3cbeae2116792904931cefccf", size = 567388, upload-time = "2025-08-03T05:04:58.704Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b4/8ffb9cfb363bc9d61c5d8d9f79a7ada572b0865dac9f4a547da901b81d76/pyzmq-27.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ffe6b809a97ac6dea524b3b837d5b28743d8c2f121141056d168ff0ba8f614ef", size = 632004, upload-time = "2025-08-03T05:05:00.434Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4b/dd5c4d3bb7261efb30a909d2df447ac77393653e5c34c8a9cd536f429c3e/pyzmq-27.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:fde26267416c8478c95432c81489b53f57b0b5d24cd5c8bfaebf5bbaac4dc90c", size = 559881, upload-time = "2025-08-03T05:05:02.363Z" }, + { url = "https://files.pythonhosted.org/packages/6f/87/fc96f224dd99070fe55d0afc37ac08d7d4635d434e3f9425b232867e01b9/pyzmq-27.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:544b995a6a1976fad5d7ff01409b4588f7608ccc41be72147700af91fd44875d", size = 835950, upload-time = "2025-08-03T05:05:04.193Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/802d96017f176c3a7285603d9ed2982550095c136c6230d3e0b53f52c7e5/pyzmq-27.0.1-pp310-pypy310_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0f772eea55cccce7f45d6ecdd1d5049c12a77ec22404f6b892fae687faa87bee", size = 799876, upload-time = "2025-08-03T05:05:06.263Z" }, + { url = "https://files.pythonhosted.org/packages/4e/52/49045c6528007cce385f218f3a674dc84fc8b3265330d09e57c0a59b41f4/pyzmq-27.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9d63d66059114a6756d09169c9209ffceabacb65b9cb0f66e6fc344b20b73e6", size = 567402, upload-time = "2025-08-03T05:05:08.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/fe/c29ac0d5a817543ecf0cb18f17195805bad0da567a1c64644aacf11b2779/pyzmq-27.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1da8e645c655d86f0305fb4c65a0d848f461cd90ee07d21f254667287b5dbe50", size = 747030, upload-time = "2025-08-03T05:05:10.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d1/cc1fbfb65b4042016e4e035b2548cdfe0945c817345df83aa2d98490e7fc/pyzmq-27.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1843fd0daebcf843fe6d4da53b8bdd3fc906ad3e97d25f51c3fed44436d82a49", size = 544567, upload-time = "2025-08-03T05:05:11.856Z" }, + { url = "https://files.pythonhosted.org/packages/b4/1a/49f66fe0bc2b2568dd4280f1f520ac8fafd73f8d762140e278d48aeaf7b9/pyzmq-27.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7fb0ee35845bef1e8c4a152d766242164e138c239e3182f558ae15cb4a891f94", size = 835949, upload-time = "2025-08-03T05:05:13.798Z" }, + { url = "https://files.pythonhosted.org/packages/49/94/443c1984b397eab59b14dd7ae8bc2ac7e8f32dbc646474453afcaa6508c4/pyzmq-27.0.1-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f379f11e138dfd56c3f24a04164f871a08281194dd9ddf656a278d7d080c8ad0", size = 799875, upload-time = "2025-08-03T05:05:15.632Z" }, + { url = "https://files.pythonhosted.org/packages/30/f1/fd96138a0f152786a2ba517e9c6a8b1b3516719e412a90bb5d8eea6b660c/pyzmq-27.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b978c0678cffbe8860ec9edc91200e895c29ae1ac8a7085f947f8e8864c489fb", size = 567403, upload-time = "2025-08-03T05:05:17.326Z" }, + { url = "https://files.pythonhosted.org/packages/16/57/34e53ef2b55b1428dac5aabe3a974a16c8bda3bf20549ba500e3ff6cb426/pyzmq-27.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ebccf0d760bc92a4a7c751aeb2fef6626144aace76ee8f5a63abeb100cae87f", size = 747032, upload-time = "2025-08-03T05:05:19.074Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/b7/769598c5ae336fdb657946950465569cf18803140fe89ce466d7f0a57c11/pyzmq-27.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:77fed80e30fa65708546c4119840a46691290efc231f6bfb2ac2a39b52e15811", size = 544566, upload-time = "2025-08-03T05:05:20.798Z" }, + { url = "https://files.pythonhosted.org/packages/60/8d/c0880acd2d5908eec6fe9b399f0fb630e5f203f8a69f82442d5cb2b2f46c/pyzmq-27.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d97b59cbd8a6c8b23524a8ce237ff9504d987dc07156258aa68ae06d2dd5f34d", size = 835946, upload-time = "2025-08-03T05:05:31.161Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6b71409aa6629b3d4917b38961501898827f4fb5ddc680cc8e0cb13987f3/pyzmq-27.0.1-pp39-pypy39_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:27a78bdd384dbbe7b357af95f72efe8c494306b5ec0a03c31e2d53d6763e5307", size = 799870, upload-time = "2025-08-03T05:05:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/5d36d8f6571478f32c32f5872abd76eda052746283ca87e24cc5758f7987/pyzmq-27.0.1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b007e5dcba684e888fbc90554cb12a2f4e492927c8c2761a80b7590209821743", size = 758371, upload-time = "2025-08-03T05:05:34.722Z" }, + { url = "https://files.pythonhosted.org/packages/6f/29/6a7b7f5d47712487d8a3516584a4a484a0147f2537228237397793b2de69/pyzmq-27.0.1-pp39-pypy39_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:95594b2ceeaa94934e3e94dd7bf5f3c3659cf1a26b1fb3edcf6e42dad7e0eaf2", size = 567395, upload-time = "2025-08-03T05:05:36.701Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/c1f26d13e9d4c3bfce42fead8ff640f6c06a58decde49a6b295b9d52cefd/pyzmq-27.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:70b719a130b81dd130a57ac0ff636dc2c0127c5b35ca5467d1b67057e3c7a4d2", size = 544561, upload-time = "2025-08-03T05:05:38.608Z" }, ] [[package]] name = "regex" -version = "2024.11.6" -source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91", size = 482674, upload-time = "2024-11-06T20:08:57.575Z" }, - { url = "https://files.pythonhosted.org/packages/15/51/9f35d12da8434b489c7b7bffc205c474a0a9432a889457026e9bc06a297a/regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0", size = 287684, upload-time = "2024-11-06T20:08:59.787Z" }, - { url = "https://files.pythonhosted.org/packages/bd/18/b731f5510d1b8fb63c6b6d3484bfa9a59b84cc578ac8b5172970e05ae07c/regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e", size = 284589, upload-time = "2024-11-06T20:09:01.896Z" }, - { url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde", size = 782511, upload-time = "2024-11-06T20:09:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e", size = 821149, upload-time = "2024-11-06T20:09:06.237Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2", size = 809707, upload-time = "2024-11-06T20:09:07.715Z" }, - { url = "https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf", size = 781702, upload-time = "2024-11-06T20:09:10.101Z" }, - { url = "https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c", size = 771976, upload-time = "2024-11-06T20:09:11.566Z" }, - { url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86", size = 697397, upload-time = "2024-11-06T20:09:13.119Z" }, - { url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67", size = 768726, upload-time = "2024-11-06T20:09:14.85Z" }, - { url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d", size = 775098, upload-time = 
"2024-11-06T20:09:16.504Z" }, - { url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2", size = 839325, upload-time = "2024-11-06T20:09:18.698Z" }, - { url = "https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008", size = 843277, upload-time = "2024-11-06T20:09:21.725Z" }, - { url = "https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62", size = 773197, upload-time = "2024-11-06T20:09:24.092Z" }, - { url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e", size = 261714, upload-time = "2024-11-06T20:09:26.36Z" }, - { url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519", size = 274042, upload-time = "2024-11-06T20:09:28.762Z" }, - { url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638", size = 482669, upload-time = "2024-11-06T20:09:31.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7", size = 287684, upload-time = "2024-11-06T20:09:32.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20", size = 284589, upload-time = "2024-11-06T20:09:35.504Z" }, - { url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121, upload-time = "2024-11-06T20:09:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275, upload-time = "2024-11-06T20:09:40.371Z" }, - { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257, upload-time = "2024-11-06T20:09:43.059Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727, upload-time = "2024-11-06T20:09:48.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667, upload-time = "2024-11-06T20:09:49.828Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963, upload-time = "2024-11-06T20:09:51.819Z" }, - { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700, upload-time = "2024-11-06T20:09:53.982Z" }, - { url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592, upload-time = "2024-11-06T20:09:56.222Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929, upload-time = "2024-11-06T20:09:58.642Z" }, - { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213, upload-time = "2024-11-06T20:10:00.867Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9", size = 261734, upload-time = "2024-11-06T20:10:03.361Z" }, - { url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60", size = 274052, upload-time = "2024-11-06T20:10:05.179Z" }, - { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, - { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, - { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, - { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, - { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, - { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, - { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, - { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525, upload-time = "2024-11-06T20:10:45.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324, upload-time = "2024-11-06T20:10:47.177Z" }, - { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617, upload-time = "2024-11-06T20:10:49.312Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023, upload-time = "2024-11-06T20:10:51.102Z" }, - { url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072, upload-time = "2024-11-06T20:10:52.926Z" }, - { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130, upload-time = "2024-11-06T20:10:54.828Z" }, - { url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857, upload-time = "2024-11-06T20:10:56.634Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006, upload-time = "2024-11-06T20:10:59.369Z" }, - { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650, upload-time = "2024-11-06T20:11:02.042Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545, upload-time = "2024-11-06T20:11:03.933Z" }, - { url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045, upload-time = "2024-11-06T20:11:06.497Z" }, - { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182, upload-time = "2024-11-06T20:11:09.06Z" }, - { url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733, upload-time = "2024-11-06T20:11:11.256Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122, upload-time = "2024-11-06T20:11:13.161Z" }, - { url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545, upload-time = "2024-11-06T20:11:15Z" }, - { url = "https://files.pythonhosted.org/packages/89/23/c4a86df398e57e26f93b13ae63acce58771e04bdde86092502496fa57f9c/regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839", size = 482682, upload-time = "2024-11-06T20:11:52.65Z" }, - { url = "https://files.pythonhosted.org/packages/3c/8b/45c24ab7a51a1658441b961b86209c43e6bb9d39caf1e63f46ce6ea03bc7/regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e", size = 287679, upload-time = "2024-11-06T20:11:55.011Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d1/598de10b17fdafc452d11f7dada11c3be4e379a8671393e4e3da3c4070df/regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf", size = 284578, upload-time = "2024-11-06T20:11:57.033Z" }, - { url = "https://files.pythonhosted.org/packages/49/70/c7eaa219efa67a215846766fde18d92d54cb590b6a04ffe43cef30057622/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b", size = 782012, upload-time = "2024-11-06T20:11:59.218Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/e5/ef52c7eb117dd20ff1697968219971d052138965a4d3d9b95e92e549f505/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0", size = 820580, upload-time = "2024-11-06T20:12:01.969Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3f/9f5da81aff1d4167ac52711acf789df13e789fe6ac9545552e49138e3282/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b", size = 809110, upload-time = "2024-11-06T20:12:04.786Z" }, - { url = "https://files.pythonhosted.org/packages/86/44/2101cc0890c3621b90365c9ee8d7291a597c0722ad66eccd6ffa7f1bcc09/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef", size = 780919, upload-time = "2024-11-06T20:12:06.944Z" }, - { url = "https://files.pythonhosted.org/packages/ce/2e/3e0668d8d1c7c3c0d397bf54d92fc182575b3a26939aed5000d3cc78760f/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48", size = 771515, upload-time = "2024-11-06T20:12:09.9Z" }, - { url = "https://files.pythonhosted.org/packages/a6/49/1bc4584254355e3dba930a3a2fd7ad26ccba3ebbab7d9100db0aff2eedb0/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13", size = 696957, upload-time = "2024-11-06T20:12:12.319Z" }, - { url = "https://files.pythonhosted.org/packages/c8/dd/42879c1fc8a37a887cd08e358af3d3ba9e23038cd77c7fe044a86d9450ba/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2", size = 768088, 
upload-time = "2024-11-06T20:12:15.149Z" }, - { url = "https://files.pythonhosted.org/packages/89/96/c05a0fe173cd2acd29d5e13c1adad8b706bcaa71b169e1ee57dcf2e74584/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95", size = 774752, upload-time = "2024-11-06T20:12:17.416Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f3/a757748066255f97f14506483436c5f6aded7af9e37bca04ec30c90ca683/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9", size = 838862, upload-time = "2024-11-06T20:12:19.639Z" }, - { url = "https://files.pythonhosted.org/packages/5c/93/c6d2092fd479dcaeea40fc8fa673822829181ded77d294a7f950f1dda6e2/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f", size = 842622, upload-time = "2024-11-06T20:12:21.841Z" }, - { url = "https://files.pythonhosted.org/packages/ff/9c/daa99532c72f25051a90ef90e1413a8d54413a9e64614d9095b0c1c154d0/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b", size = 772713, upload-time = "2024-11-06T20:12:24.785Z" }, - { url = "https://files.pythonhosted.org/packages/13/5d/61a533ccb8c231b474ac8e3a7d70155b00dfc61af6cafdccd1947df6d735/regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57", size = 261756, upload-time = "2024-11-06T20:12:26.975Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7b/e59b7f7c91ae110d154370c24133f947262525b5d6406df65f23422acc17/regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983", size = 274110, upload-time = "2024-11-06T20:12:29.368Z" }, +version = "2025.7.34" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/de/e13fa6dc61d78b30ba47481f99933a3b49a57779d625c392d8036770a60d/regex-2025.7.34.tar.gz", hash = "sha256:9ead9765217afd04a86822dfcd4ed2747dfe426e887da413b15ff0ac2457e21a", size = 400714, upload-time = "2025-07-31T00:21:16.262Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/d2/0a44a9d92370e5e105f16669acf801b215107efea9dea4317fe96e9aad67/regex-2025.7.34-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d856164d25e2b3b07b779bfed813eb4b6b6ce73c2fd818d46f47c1eb5cd79bd6", size = 484591, upload-time = "2025-07-31T00:18:46.675Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b1/00c4f83aa902f1048495de9f2f33638ce970ce1cf9447b477d272a0e22bb/regex-2025.7.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d15a9da5fad793e35fb7be74eec450d968e05d2e294f3e0e77ab03fa7234a83", size = 289293, upload-time = "2025-07-31T00:18:53.069Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b0/5bc5c8ddc418e8be5530b43ae1f7c9303f43aeff5f40185c4287cf6732f2/regex-2025.7.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95b4639c77d414efa93c8de14ce3f7965a94d007e068a94f9d4997bb9bd9c81f", size = 285932, upload-time = "2025-07-31T00:18:54.673Z" }, + { url = "https://files.pythonhosted.org/packages/46/c7/a1a28d050b23665a5e1eeb4d7f13b83ea86f0bc018da7b8f89f86ff7f094/regex-2025.7.34-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7de1ceed5a5f84f342ba4a9f4ae589524adf9744b2ee61b5da884b5b659834", size = 780361, upload-time = "2025-07-31T00:18:56.13Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0d/82e7afe7b2c9fe3d488a6ab6145d1d97e55f822dfb9b4569aba2497e3d09/regex-2025.7.34-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02e5860a250cd350c4933cf376c3bc9cb28948e2c96a8bc042aee7b985cfa26f", size = 849176, upload-time = "2025-07-31T00:18:57.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/16/3036e16903d8194f1490af457a7e33b06d9e9edd9576b1fe6c7ac660e9ed/regex-2025.7.34-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a5966220b9a1a88691282b7e4350e9599cf65780ca60d914a798cb791aa1177", size = 897222, upload-time = "2025-07-31T00:18:58.721Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c2/010e089ae00d31418e7d2c6601760eea1957cde12be719730c7133b8c165/regex-2025.7.34-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48fb045bbd4aab2418dc1ba2088a5e32de4bfe64e1457b948bb328a8dc2f1c2e", size = 789831, upload-time = "2025-07-31T00:19:00.436Z" }, + { url = "https://files.pythonhosted.org/packages/dd/86/b312b7bf5c46d21dbd9a3fdc4a80fde56ea93c9c0b89cf401879635e094d/regex-2025.7.34-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:20ff8433fa45e131f7316594efe24d4679c5449c0ca69d91c2f9d21846fdf064", size = 780665, upload-time = "2025-07-31T00:19:01.828Z" }, + { url = "https://files.pythonhosted.org/packages/40/e5/674b82bfff112c820b09e3c86a423d4a568143ede7f8440fdcbce259e895/regex-2025.7.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c436fd1e95c04c19039668cfb548450a37c13f051e8659f40aed426e36b3765f", size = 773511, upload-time = "2025-07-31T00:19:03.654Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/39e7c578eb6cf1454db2b64e4733d7e4f179714867a75d84492ec44fa9b2/regex-2025.7.34-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b85241d3cfb9f8a13cefdfbd58a2843f208f2ed2c88181bf84e22e0c7fc066d", size = 843990, upload-time = "2025-07-31T00:19:05.61Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d9/522a6715aefe2f463dc60c68924abeeb8ab6893f01adf5720359d94ede8c/regex-2025.7.34-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:075641c94126b064c65ab86e7e71fc3d63e7ff1bea1fb794f0773c97cdad3a03", size = 834676, upload-time = "2025-07-31T00:19:07.023Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/53/c4d5284cb40543566542e24f1badc9f72af68d01db21e89e36e02292eee0/regex-2025.7.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:70645cad3407d103d1dbcb4841839d2946f7d36cf38acbd40120fee1682151e5", size = 778420, upload-time = "2025-07-31T00:19:08.511Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4a/b779a7707d4a44a7e6ee9d0d98e40b2a4de74d622966080e9c95e25e2d24/regex-2025.7.34-cp310-cp310-win32.whl", hash = "sha256:3b836eb4a95526b263c2a3359308600bd95ce7848ebd3c29af0c37c4f9627cd3", size = 263999, upload-time = "2025-07-31T00:19:10.072Z" }, + { url = "https://files.pythonhosted.org/packages/ef/6e/33c7583f5427aa039c28bff7f4103c2de5b6aa5b9edc330c61ec576b1960/regex-2025.7.34-cp310-cp310-win_amd64.whl", hash = "sha256:cbfaa401d77334613cf434f723c7e8ba585df162be76474bccc53ae4e5520b3a", size = 276023, upload-time = "2025-07-31T00:19:11.34Z" }, + { url = "https://files.pythonhosted.org/packages/9f/fc/00b32e0ac14213d76d806d952826402b49fd06d42bfabacdf5d5d016bc47/regex-2025.7.34-cp310-cp310-win_arm64.whl", hash = "sha256:bca11d3c38a47c621769433c47f364b44e8043e0de8e482c5968b20ab90a3986", size = 268357, upload-time = "2025-07-31T00:19:12.729Z" }, + { url = "https://files.pythonhosted.org/packages/0d/85/f497b91577169472f7c1dc262a5ecc65e39e146fc3a52c571e5daaae4b7d/regex-2025.7.34-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da304313761b8500b8e175eb2040c4394a875837d5635f6256d6fa0377ad32c8", size = 484594, upload-time = "2025-07-31T00:19:13.927Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c5/ad2a5c11ce9e6257fcbfd6cd965d07502f6054aaa19d50a3d7fd991ec5d1/regex-2025.7.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:35e43ebf5b18cd751ea81455b19acfdec402e82fe0dc6143edfae4c5c4b3909a", size = 289294, upload-time = "2025-07-31T00:19:15.395Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/01/83ffd9641fcf5e018f9b51aa922c3e538ac9439424fda3df540b643ecf4f/regex-2025.7.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96bbae4c616726f4661fe7bcad5952e10d25d3c51ddc388189d8864fbc1b3c68", size = 285933, upload-time = "2025-07-31T00:19:16.704Z" }, + { url = "https://files.pythonhosted.org/packages/77/20/5edab2e5766f0259bc1da7381b07ce6eb4401b17b2254d02f492cd8a81a8/regex-2025.7.34-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9feab78a1ffa4f2b1e27b1bcdaad36f48c2fed4870264ce32f52a393db093c78", size = 792335, upload-time = "2025-07-31T00:19:18.561Z" }, + { url = "https://files.pythonhosted.org/packages/30/bd/744d3ed8777dce8487b2606b94925e207e7c5931d5870f47f5b643a4580a/regex-2025.7.34-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f14b36e6d4d07f1a5060f28ef3b3561c5d95eb0651741474ce4c0a4c56ba8719", size = 858605, upload-time = "2025-07-31T00:19:20.204Z" }, + { url = "https://files.pythonhosted.org/packages/99/3d/93754176289718d7578c31d151047e7b8acc7a8c20e7706716f23c49e45e/regex-2025.7.34-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85c3a958ef8b3d5079c763477e1f09e89d13ad22198a37e9d7b26b4b17438b33", size = 905780, upload-time = "2025-07-31T00:19:21.876Z" }, + { url = "https://files.pythonhosted.org/packages/ee/2e/c689f274a92deffa03999a430505ff2aeace408fd681a90eafa92fdd6930/regex-2025.7.34-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37555e4ae0b93358fa7c2d240a4291d4a4227cc7c607d8f85596cdb08ec0a083", size = 798868, upload-time = "2025-07-31T00:19:23.222Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9e/39673688805d139b33b4a24851a71b9978d61915c4d72b5ffda324d0668a/regex-2025.7.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee38926f31f1aa61b0232a3a11b83461f7807661c062df9eb88769d86e6195c3", size = 
781784, upload-time = "2025-07-31T00:19:24.59Z" }, + { url = "https://files.pythonhosted.org/packages/18/bd/4c1cab12cfabe14beaa076523056b8ab0c882a8feaf0a6f48b0a75dab9ed/regex-2025.7.34-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a664291c31cae9c4a30589bd8bc2ebb56ef880c9c6264cb7643633831e606a4d", size = 852837, upload-time = "2025-07-31T00:19:25.911Z" }, + { url = "https://files.pythonhosted.org/packages/cb/21/663d983cbb3bba537fc213a579abbd0f263fb28271c514123f3c547ab917/regex-2025.7.34-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f3e5c1e0925e77ec46ddc736b756a6da50d4df4ee3f69536ffb2373460e2dafd", size = 844240, upload-time = "2025-07-31T00:19:27.688Z" }, + { url = "https://files.pythonhosted.org/packages/8e/2d/9beeeb913bc5d32faa913cf8c47e968da936af61ec20af5d269d0f84a100/regex-2025.7.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d428fc7731dcbb4e2ffe43aeb8f90775ad155e7db4347a639768bc6cd2df881a", size = 787139, upload-time = "2025-07-31T00:19:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f5/9b9384415fdc533551be2ba805dd8c4621873e5df69c958f403bfd3b2b6e/regex-2025.7.34-cp311-cp311-win32.whl", hash = "sha256:e154a7ee7fa18333ad90b20e16ef84daaeac61877c8ef942ec8dfa50dc38b7a1", size = 264019, upload-time = "2025-07-31T00:19:31.129Z" }, + { url = "https://files.pythonhosted.org/packages/18/9d/e069ed94debcf4cc9626d652a48040b079ce34c7e4fb174f16874958d485/regex-2025.7.34-cp311-cp311-win_amd64.whl", hash = "sha256:24257953d5c1d6d3c129ab03414c07fc1a47833c9165d49b954190b2b7f21a1a", size = 276047, upload-time = "2025-07-31T00:19:32.497Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/3bafbe9d1fd1db77355e7fbbbf0d0cfb34501a8b8e334deca14f94c7b315/regex-2025.7.34-cp311-cp311-win_arm64.whl", hash = "sha256:3157aa512b9e606586900888cd469a444f9b898ecb7f8931996cb715f77477f0", size = 268362, upload-time = "2025-07-31T00:19:34.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/f0/31d62596c75a33f979317658e8d261574785c6cd8672c06741ce2e2e2070/regex-2025.7.34-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7f7211a746aced993bef487de69307a38c5ddd79257d7be83f7b202cb59ddb50", size = 485492, upload-time = "2025-07-31T00:19:35.57Z" }, + { url = "https://files.pythonhosted.org/packages/d8/16/b818d223f1c9758c3434be89aa1a01aae798e0e0df36c1f143d1963dd1ee/regex-2025.7.34-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fb31080f2bd0681484b275461b202b5ad182f52c9ec606052020fe13eb13a72f", size = 290000, upload-time = "2025-07-31T00:19:37.175Z" }, + { url = "https://files.pythonhosted.org/packages/cd/70/69506d53397b4bd6954061bae75677ad34deb7f6ca3ba199660d6f728ff5/regex-2025.7.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0200a5150c4cf61e407038f4b4d5cdad13e86345dac29ff9dab3d75d905cf130", size = 286072, upload-time = "2025-07-31T00:19:38.612Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/536a216d5f66084fb577bb0543b5cb7de3272eb70a157f0c3a542f1c2551/regex-2025.7.34-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:739a74970e736df0773788377969c9fea3876c2fc13d0563f98e5503e5185f46", size = 797341, upload-time = "2025-07-31T00:19:40.119Z" }, + { url = "https://files.pythonhosted.org/packages/26/af/733f8168449e56e8f404bb807ea7189f59507cbea1b67a7bbcd92f8bf844/regex-2025.7.34-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4fef81b2f7ea6a2029161ed6dea9ae13834c28eb5a95b8771828194a026621e4", size = 862556, upload-time = "2025-07-31T00:19:41.556Z" }, + { url = "https://files.pythonhosted.org/packages/19/dd/59c464d58c06c4f7d87de4ab1f590e430821345a40c5d345d449a636d15f/regex-2025.7.34-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ea74cf81fe61a7e9d77989050d0089a927ab758c29dac4e8e1b6c06fccf3ebf0", size = 910762, upload-time = "2025-07-31T00:19:43Z" 
}, + { url = "https://files.pythonhosted.org/packages/37/a8/b05ccf33ceca0815a1e253693b2c86544932ebcc0049c16b0fbdf18b688b/regex-2025.7.34-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e4636a7f3b65a5f340ed9ddf53585c42e3ff37101d383ed321bfe5660481744b", size = 801892, upload-time = "2025-07-31T00:19:44.645Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/b993cb2e634cc22810afd1652dba0cae156c40d4864285ff486c73cd1996/regex-2025.7.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cef962d7834437fe8d3da6f9bfc6f93f20f218266dcefec0560ed7765f5fe01", size = 786551, upload-time = "2025-07-31T00:19:46.127Z" }, + { url = "https://files.pythonhosted.org/packages/2d/79/7849d67910a0de4e26834b5bb816e028e35473f3d7ae563552ea04f58ca2/regex-2025.7.34-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:cbe1698e5b80298dbce8df4d8d1182279fbdaf1044e864cbc9d53c20e4a2be77", size = 856457, upload-time = "2025-07-31T00:19:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/91/c6/de516bc082524b27e45cb4f54e28bd800c01efb26d15646a65b87b13a91e/regex-2025.7.34-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:32b9f9bcf0f605eb094b08e8da72e44badabb63dde6b83bd530580b488d1c6da", size = 848902, upload-time = "2025-07-31T00:19:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/22/519ff8ba15f732db099b126f039586bd372da6cd4efb810d5d66a5daeda1/regex-2025.7.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:524c868ba527eab4e8744a9287809579f54ae8c62fbf07d62aacd89f6026b282", size = 788038, upload-time = "2025-07-31T00:19:50.794Z" }, + { url = "https://files.pythonhosted.org/packages/3f/7d/aabb467d8f57d8149895d133c88eb809a1a6a0fe262c1d508eb9dfabb6f9/regex-2025.7.34-cp312-cp312-win32.whl", hash = "sha256:d600e58ee6d036081c89696d2bdd55d507498a7180df2e19945c6642fac59588", size = 264417, upload-time = "2025-07-31T00:19:52.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/39/bd922b55a4fc5ad5c13753274e5b536f5b06ec8eb9747675668491c7ab7a/regex-2025.7.34-cp312-cp312-win_amd64.whl", hash = "sha256:9a9ab52a466a9b4b91564437b36417b76033e8778e5af8f36be835d8cb370d62", size = 275387, upload-time = "2025-07-31T00:19:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/f7/3c/c61d2fdcecb754a40475a3d1ef9a000911d3e3fc75c096acf44b0dfb786a/regex-2025.7.34-cp312-cp312-win_arm64.whl", hash = "sha256:c83aec91af9c6fbf7c743274fd952272403ad9a9db05fe9bfc9df8d12b45f176", size = 268482, upload-time = "2025-07-31T00:19:55.183Z" }, + { url = "https://files.pythonhosted.org/packages/15/16/b709b2119975035169a25aa8e4940ca177b1a2e25e14f8d996d09130368e/regex-2025.7.34-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c3c9740a77aeef3f5e3aaab92403946a8d34437db930a0280e7e81ddcada61f5", size = 485334, upload-time = "2025-07-31T00:19:56.58Z" }, + { url = "https://files.pythonhosted.org/packages/94/a6/c09136046be0595f0331bc58a0e5f89c2d324cf734e0b0ec53cf4b12a636/regex-2025.7.34-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:69ed3bc611540f2ea70a4080f853741ec698be556b1df404599f8724690edbcd", size = 289942, upload-time = "2025-07-31T00:19:57.943Z" }, + { url = "https://files.pythonhosted.org/packages/36/91/08fc0fd0f40bdfb0e0df4134ee37cfb16e66a1044ac56d36911fd01c69d2/regex-2025.7.34-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d03c6f9dcd562c56527c42b8530aad93193e0b3254a588be1f2ed378cdfdea1b", size = 285991, upload-time = "2025-07-31T00:19:59.837Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/99dc8f6f756606f0c214d14c7b6c17270b6bbe26d5c1f05cde9dbb1c551f/regex-2025.7.34-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6164b1d99dee1dfad33f301f174d8139d4368a9fb50bf0a3603b2eaf579963ad", size = 797415, upload-time = "2025-07-31T00:20:01.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/cf/2fcdca1110495458ba4e95c52ce73b361cf1cafd8a53b5c31542cde9a15b/regex-2025.7.34-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1e4f4f62599b8142362f164ce776f19d79bdd21273e86920a7b604a4275b4f59", size = 862487, upload-time = "2025-07-31T00:20:03.142Z" }, + { url = "https://files.pythonhosted.org/packages/90/38/899105dd27fed394e3fae45607c1983e138273ec167e47882fc401f112b9/regex-2025.7.34-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:72a26dcc6a59c057b292f39d41465d8233a10fd69121fa24f8f43ec6294e5415", size = 910717, upload-time = "2025-07-31T00:20:04.727Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f6/4716198dbd0bcc9c45625ac4c81a435d1c4d8ad662e8576dac06bab35b17/regex-2025.7.34-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5273fddf7a3e602695c92716c420c377599ed3c853ea669c1fe26218867002f", size = 801943, upload-time = "2025-07-31T00:20:07.1Z" }, + { url = "https://files.pythonhosted.org/packages/40/5d/cff8896d27e4e3dd11dd72ac78797c7987eb50fe4debc2c0f2f1682eb06d/regex-2025.7.34-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c1844be23cd40135b3a5a4dd298e1e0c0cb36757364dd6cdc6025770363e06c1", size = 786664, upload-time = "2025-07-31T00:20:08.818Z" }, + { url = "https://files.pythonhosted.org/packages/10/29/758bf83cf7b4c34f07ac3423ea03cee3eb3176941641e4ccc05620f6c0b8/regex-2025.7.34-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dde35e2afbbe2272f8abee3b9fe6772d9b5a07d82607b5788e8508974059925c", size = 856457, upload-time = "2025-07-31T00:20:10.328Z" }, + { url = "https://files.pythonhosted.org/packages/d7/30/c19d212b619963c5b460bfed0ea69a092c6a43cba52a973d46c27b3e2975/regex-2025.7.34-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f6e8e7af516a7549412ce57613e859c3be27d55341a894aacaa11703a4c31a", size = 849008, upload-time = 
"2025-07-31T00:20:11.823Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b8/3c35da3b12c87e3cc00010ef6c3a4ae787cff0bc381aa3d251def219969a/regex-2025.7.34-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:469142fb94a869beb25b5f18ea87646d21def10fbacb0bcb749224f3509476f0", size = 788101, upload-time = "2025-07-31T00:20:13.729Z" }, + { url = "https://files.pythonhosted.org/packages/47/80/2f46677c0b3c2b723b2c358d19f9346e714113865da0f5f736ca1a883bde/regex-2025.7.34-cp313-cp313-win32.whl", hash = "sha256:da7507d083ee33ccea1310447410c27ca11fb9ef18c95899ca57ff60a7e4d8f1", size = 264401, upload-time = "2025-07-31T00:20:15.233Z" }, + { url = "https://files.pythonhosted.org/packages/be/fa/917d64dd074682606a003cba33585c28138c77d848ef72fc77cbb1183849/regex-2025.7.34-cp313-cp313-win_amd64.whl", hash = "sha256:9d644de5520441e5f7e2db63aec2748948cc39ed4d7a87fd5db578ea4043d997", size = 275368, upload-time = "2025-07-31T00:20:16.711Z" }, + { url = "https://files.pythonhosted.org/packages/65/cd/f94383666704170a2154a5df7b16be28f0c27a266bffcd843e58bc84120f/regex-2025.7.34-cp313-cp313-win_arm64.whl", hash = "sha256:7bf1c5503a9f2cbd2f52d7e260acb3131b07b6273c470abb78568174fe6bde3f", size = 268482, upload-time = "2025-07-31T00:20:18.189Z" }, + { url = "https://files.pythonhosted.org/packages/ac/23/6376f3a23cf2f3c00514b1cdd8c990afb4dfbac3cb4a68b633c6b7e2e307/regex-2025.7.34-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:8283afe7042d8270cecf27cca558873168e771183d4d593e3c5fe5f12402212a", size = 485385, upload-time = "2025-07-31T00:20:19.692Z" }, + { url = "https://files.pythonhosted.org/packages/73/5b/6d4d3a0b4d312adbfd6d5694c8dddcf1396708976dd87e4d00af439d962b/regex-2025.7.34-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6c053f9647e3421dd2f5dff8172eb7b4eec129df9d1d2f7133a4386319b47435", size = 289788, upload-time = "2025-07-31T00:20:21.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/71/5862ac9913746e5054d01cb9fb8125b3d0802c0706ef547cae1e7f4428fa/regex-2025.7.34-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a16dd56bbcb7d10e62861c3cd000290ddff28ea142ffb5eb3470f183628011ac", size = 286136, upload-time = "2025-07-31T00:20:26.146Z" }, + { url = "https://files.pythonhosted.org/packages/27/df/5b505dc447eb71278eba10d5ec940769ca89c1af70f0468bfbcb98035dc2/regex-2025.7.34-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69c593ff5a24c0d5c1112b0df9b09eae42b33c014bdca7022d6523b210b69f72", size = 797753, upload-time = "2025-07-31T00:20:27.919Z" }, + { url = "https://files.pythonhosted.org/packages/86/38/3e3dc953d13998fa047e9a2414b556201dbd7147034fbac129392363253b/regex-2025.7.34-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98d0ce170fcde1a03b5df19c5650db22ab58af375aaa6ff07978a85c9f250f0e", size = 863263, upload-time = "2025-07-31T00:20:29.803Z" }, + { url = "https://files.pythonhosted.org/packages/68/e5/3ff66b29dde12f5b874dda2d9dec7245c2051f2528d8c2a797901497f140/regex-2025.7.34-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d72765a4bff8c43711d5b0f5b452991a9947853dfa471972169b3cc0ba1d0751", size = 910103, upload-time = "2025-07-31T00:20:31.313Z" }, + { url = "https://files.pythonhosted.org/packages/9e/fe/14176f2182125977fba3711adea73f472a11f3f9288c1317c59cd16ad5e6/regex-2025.7.34-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4494f8fd95a77eb434039ad8460e64d57baa0434f1395b7da44015bef650d0e4", size = 801709, upload-time = "2025-07-31T00:20:33.323Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0d/80d4e66ed24f1ba876a9e8e31b709f9fd22d5c266bf5f3ab3c1afe683d7d/regex-2025.7.34-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4f42b522259c66e918a0121a12429b2abcf696c6f967fa37bdc7b72e61469f98", size = 
786726, upload-time = "2025-07-31T00:20:35.252Z" }, + { url = "https://files.pythonhosted.org/packages/12/75/c3ebb30e04a56c046f5c85179dc173818551037daae2c0c940c7b19152cb/regex-2025.7.34-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:aaef1f056d96a0a5d53ad47d019d5b4c66fe4be2da87016e0d43b7242599ffc7", size = 857306, upload-time = "2025-07-31T00:20:37.12Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b2/a4dc5d8b14f90924f27f0ac4c4c4f5e195b723be98adecc884f6716614b6/regex-2025.7.34-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:656433e5b7dccc9bc0da6312da8eb897b81f5e560321ec413500e5367fcd5d47", size = 848494, upload-time = "2025-07-31T00:20:38.818Z" }, + { url = "https://files.pythonhosted.org/packages/0d/21/9ac6e07a4c5e8646a90b56b61f7e9dac11ae0747c857f91d3d2bc7c241d9/regex-2025.7.34-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e91eb2c62c39705e17b4d42d4b86c4e86c884c0d15d9c5a47d0835f8387add8e", size = 787850, upload-time = "2025-07-31T00:20:40.478Z" }, + { url = "https://files.pythonhosted.org/packages/be/6c/d51204e28e7bc54f9a03bb799b04730d7e54ff2718862b8d4e09e7110a6a/regex-2025.7.34-cp314-cp314-win32.whl", hash = "sha256:f978ddfb6216028c8f1d6b0f7ef779949498b64117fc35a939022f67f810bdcb", size = 269730, upload-time = "2025-07-31T00:20:42.253Z" }, + { url = "https://files.pythonhosted.org/packages/74/52/a7e92d02fa1fdef59d113098cb9f02c5d03289a0e9f9e5d4d6acccd10677/regex-2025.7.34-cp314-cp314-win_amd64.whl", hash = "sha256:4b7dc33b9b48fb37ead12ffc7bdb846ac72f99a80373c4da48f64b373a7abeae", size = 278640, upload-time = "2025-07-31T00:20:44.42Z" }, + { url = "https://files.pythonhosted.org/packages/d1/78/a815529b559b1771080faa90c3ab401730661f99d495ab0071649f139ebd/regex-2025.7.34-cp314-cp314-win_arm64.whl", hash = "sha256:4b8c4d39f451e64809912c82392933d80fe2e4a87eeef8859fcc5380d0173c64", size = 271757, upload-time = "2025-07-31T00:20:46.355Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/7f/8333b894499c1172c0378bb45a80146c420621e5c7b27a1d8fc5456f7038/regex-2025.7.34-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fd5edc3f453de727af267c7909d083e19f6426fc9dd149e332b6034f2a5611e6", size = 484602, upload-time = "2025-07-31T00:20:48.184Z" }, + { url = "https://files.pythonhosted.org/packages/14/47/58aac4758b659df3835e73bda070f78ec6620a028484a1fcb81daf7443ec/regex-2025.7.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa1cdfb8db96ef20137de5587954c812821966c3e8b48ffc871e22d7ec0a4938", size = 289289, upload-time = "2025-07-31T00:20:49.79Z" }, + { url = "https://files.pythonhosted.org/packages/46/cc/5c9ebdc23b34458a41b559e0ae1b759196b2212920164b9d8aae4b25aa26/regex-2025.7.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89c9504fc96268e8e74b0283e548f53a80c421182a2007e3365805b74ceef936", size = 285931, upload-time = "2025-07-31T00:20:51.362Z" }, + { url = "https://files.pythonhosted.org/packages/9a/da/467a851615b040d3be478ef60fd2d54e7e2f44eeda65dc02866ad4e404df/regex-2025.7.34-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33be70d75fa05a904ee0dc43b650844e067d14c849df7e82ad673541cd465b5f", size = 779782, upload-time = "2025-07-31T00:20:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/a0/47/6eab7100b7ded84e94312c6791ab72581950b7adaa5ad48cdd3dfa329ab8/regex-2025.7.34-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:57d25b6732ea93eeb1d090e8399b6235ca84a651b52d52d272ed37d3d2efa0f1", size = 848838, upload-time = "2025-07-31T00:20:54.991Z" }, + { url = "https://files.pythonhosted.org/packages/17/86/3b07305698e7ff21cc472efae816a56e77c5d45c6b7fe250a56dd67a114e/regex-2025.7.34-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:baf2fe122a3db1c0b9f161aa44463d8f7e33eeeda47bb0309923deb743a18276", size = 896648, upload-time = "2025-07-31T00:20:56.655Z" }, + { 
url = "https://files.pythonhosted.org/packages/ed/9a/c8f4f0535bf953e34e068c9a30c946e7affa06a48c48c1eda6d3a7562c49/regex-2025.7.34-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a764a83128af9c1a54be81485b34dca488cbcacefe1e1d543ef11fbace191e1", size = 789367, upload-time = "2025-07-31T00:20:58.359Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4e/1892685a0e053d376fbcb8aa618e38afc5882bd69d94e9712171b9f2a412/regex-2025.7.34-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c7f663ccc4093877f55b51477522abd7299a14c5bb7626c5238599db6a0cb95d", size = 780029, upload-time = "2025-07-31T00:21:00.383Z" }, + { url = "https://files.pythonhosted.org/packages/98/12/af86906b9342d37b051b076a3ccc925c4f33ff2a96328b3009e7b93dfc53/regex-2025.7.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4913f52fbc7a744aaebf53acd8d3dc1b519e46ba481d4d7596de3c862e011ada", size = 773039, upload-time = "2025-07-31T00:21:02.093Z" }, + { url = "https://files.pythonhosted.org/packages/97/d1/03c21fb12daf73819f39927b533d09f162e8e452bd415993607242c1cd68/regex-2025.7.34-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:efac4db9e044d47fd3b6b0d40b6708f4dfa2d8131a5ac1d604064147c0f552fd", size = 843438, upload-time = "2025-07-31T00:21:04.248Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7f/53569415d23dc47122c9f669db5d1e7aa2bd8954723e5c1050548cb7622e/regex-2025.7.34-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7373afae7cfb716e3b8e15d0184510d518f9d21471f2d62918dbece85f2c588f", size = 834053, upload-time = "2025-07-31T00:21:06.298Z" }, + { url = "https://files.pythonhosted.org/packages/7a/7a/9b6b75778f7af6306ad9dcd9860be3f9c4123385cc856b6e9d099a6403b2/regex-2025.7.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9960d162f3fecf6af252534a1ae337e9c2e20d74469fed782903b24e2cc9d3d7", size = 777909, upload-time = "2025-07-31T00:21:08.302Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/34/ebdf85bef946c63dc7995e95710364de0e3e2791bc28afc1a9642373d6c1/regex-2025.7.34-cp39-cp39-win32.whl", hash = "sha256:95d538b10eb4621350a54bf14600cc80b514211d91a019dc74b8e23d2159ace5", size = 264039, upload-time = "2025-07-31T00:21:10.346Z" }, + { url = "https://files.pythonhosted.org/packages/82/0b/fba6f0dee661b838c09c85bf598a43a915d310648d62f704ece237aa3d73/regex-2025.7.34-cp39-cp39-win_amd64.whl", hash = "sha256:f7f3071b5faa605b0ea51ec4bb3ea7257277446b053f4fd3ad02b1dcb4e64353", size = 276120, upload-time = "2025-07-31T00:21:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6d/183f0cf19bd8ac7628f4c3b2ca99033a5ad417ad010f86c61d11d27b4968/regex-2025.7.34-cp39-cp39-win_arm64.whl", hash = "sha256:716a47515ba1d03f8e8a61c5013041c8c90f2e21f055203498105d7571b44531", size = 268390, upload-time = "2025-07-31T00:21:14.293Z" }, ] [[package]] @@ -2405,27 +2635,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/90/5255432602c0b196a0da6720f6f76b93eb50baef46d3c9b0025e2f9acbf3/ruff-0.12.0.tar.gz", hash = "sha256:4d047db3662418d4a848a3fdbfaf17488b34b62f527ed6f10cb8afd78135bc5c", size = 4376101, upload-time = "2025-06-17T15:19:26.217Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/fd/b46bb20e14b11ff49dbc74c61de352e0dc07fb650189513631f6fb5fc69f/ruff-0.12.0-py3-none-linux_armv6l.whl", hash = "sha256:5652a9ecdb308a1754d96a68827755f28d5dfb416b06f60fd9e13f26191a8848", size = 10311554, upload-time = "2025-06-17T15:18:45.792Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d3/021dde5a988fa3e25d2468d1dadeea0ae89dc4bc67d0140c6e68818a12a1/ruff-0.12.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:05ed0c914fabc602fc1f3b42c53aa219e5736cb030cdd85640c32dbc73da74a6", size = 11118435, upload-time = "2025-06-17T15:18:49.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/a2/01a5acf495265c667686ec418f19fd5c32bcc326d4c79ac28824aecd6a32/ruff-0.12.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:07a7aa9b69ac3fcfda3c507916d5d1bca10821fe3797d46bad10f2c6de1edda0", size = 10466010, upload-time = "2025-06-17T15:18:51.341Z" }, - { url = "https://files.pythonhosted.org/packages/4c/57/7caf31dd947d72e7aa06c60ecb19c135cad871a0a8a251723088132ce801/ruff-0.12.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7731c3eec50af71597243bace7ec6104616ca56dda2b99c89935fe926bdcd48", size = 10661366, upload-time = "2025-06-17T15:18:53.29Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ba/aa393b972a782b4bc9ea121e0e358a18981980856190d7d2b6187f63e03a/ruff-0.12.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:952d0630eae628250ab1c70a7fffb641b03e6b4a2d3f3ec6c1d19b4ab6c6c807", size = 10173492, upload-time = "2025-06-17T15:18:55.262Z" }, - { url = "https://files.pythonhosted.org/packages/d7/50/9349ee777614bc3062fc6b038503a59b2034d09dd259daf8192f56c06720/ruff-0.12.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c021f04ea06966b02614d442e94071781c424ab8e02ec7af2f037b4c1e01cc82", size = 11761739, upload-time = "2025-06-17T15:18:58.906Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/ad459de67c70ec112e2ba7206841c8f4eb340a03ee6a5cabc159fe558b8e/ruff-0.12.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d235618283718ee2fe14db07f954f9b2423700919dc688eacf3f8797a11315c", size = 12537098, upload-time = "2025-06-17T15:19:01.316Z" }, - { url = "https://files.pythonhosted.org/packages/ed/50/15ad9c80ebd3c4819f5bd8883e57329f538704ed57bac680d95cb6627527/ruff-0.12.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0758038f81beec8cc52ca22de9685b8ae7f7cc18c013ec2050012862cc9165", size = 12154122, upload-time = "2025-06-17T15:19:03.727Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/e6/79b91e41bc8cc3e78ee95c87093c6cacfa275c786e53c9b11b9358026b3d/ruff-0.12.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:139b3d28027987b78fc8d6cfb61165447bdf3740e650b7c480744873688808c2", size = 11363374, upload-time = "2025-06-17T15:19:05.875Z" }, - { url = "https://files.pythonhosted.org/packages/db/c3/82b292ff8a561850934549aa9dc39e2c4e783ab3c21debe55a495ddf7827/ruff-0.12.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68853e8517b17bba004152aebd9dd77d5213e503a5f2789395b25f26acac0da4", size = 11587647, upload-time = "2025-06-17T15:19:08.246Z" }, - { url = "https://files.pythonhosted.org/packages/2b/42/d5760d742669f285909de1bbf50289baccb647b53e99b8a3b4f7ce1b2001/ruff-0.12.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3a9512af224b9ac4757f7010843771da6b2b0935a9e5e76bb407caa901a1a514", size = 10527284, upload-time = "2025-06-17T15:19:10.37Z" }, - { url = "https://files.pythonhosted.org/packages/19/f6/fcee9935f25a8a8bba4adbae62495c39ef281256693962c2159e8b284c5f/ruff-0.12.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b08df3d96db798e5beb488d4df03011874aff919a97dcc2dd8539bb2be5d6a88", size = 10158609, upload-time = "2025-06-17T15:19:12.286Z" }, - { url = "https://files.pythonhosted.org/packages/37/fb/057febf0eea07b9384787bfe197e8b3384aa05faa0d6bd844b94ceb29945/ruff-0.12.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6a315992297a7435a66259073681bb0d8647a826b7a6de45c6934b2ca3a9ed51", size = 11141462, upload-time = "2025-06-17T15:19:15.195Z" }, - { url = "https://files.pythonhosted.org/packages/10/7c/1be8571011585914b9d23c95b15d07eec2d2303e94a03df58294bc9274d4/ruff-0.12.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1e55e44e770e061f55a7dbc6e9aed47feea07731d809a3710feda2262d2d4d8a", size = 11641616, upload-time = "2025-06-17T15:19:17.6Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/ef/b960ab4818f90ff59e571d03c3f992828d4683561095e80f9ef31f3d58b7/ruff-0.12.0-py3-none-win32.whl", hash = "sha256:7162a4c816f8d1555eb195c46ae0bd819834d2a3f18f98cc63819a7b46f474fb", size = 10525289, upload-time = "2025-06-17T15:19:19.688Z" }, - { url = "https://files.pythonhosted.org/packages/34/93/8b16034d493ef958a500f17cda3496c63a537ce9d5a6479feec9558f1695/ruff-0.12.0-py3-none-win_amd64.whl", hash = "sha256:d00b7a157b8fb6d3827b49d3324da34a1e3f93492c1f97b08e222ad7e9b291e0", size = 11598311, upload-time = "2025-06-17T15:19:21.785Z" }, - { url = "https://files.pythonhosted.org/packages/d0/33/4d3e79e4a84533d6cd526bfb42c020a23256ae5e4265d858bd1287831f7d/ruff-0.12.0-py3-none-win_arm64.whl", hash = "sha256:8cd24580405ad8c1cc64d61725bca091d6b6da7eb3d36f72cc605467069d7e8b", size = 10724946, upload-time = "2025-06-17T15:19:23.952Z" }, +version = "0.12.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/45/2e403fa7007816b5fbb324cb4f8ed3c7402a927a0a0cb2b6279879a8bfdc/ruff-0.12.9.tar.gz", hash = "sha256:fbd94b2e3c623f659962934e52c2bea6fc6da11f667a427a368adaf3af2c866a", size = 5254702, upload-time = "2025-08-14T16:08:55.2Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/20/53bf098537adb7b6a97d98fcdebf6e916fcd11b2e21d15f8c171507909cc/ruff-0.12.9-py3-none-linux_armv6l.whl", hash = "sha256:fcebc6c79fcae3f220d05585229463621f5dbf24d79fdc4936d9302e177cfa3e", size = 11759705, upload-time = "2025-08-14T16:08:12.968Z" }, + { url = "https://files.pythonhosted.org/packages/20/4d/c764ee423002aac1ec66b9d541285dd29d2c0640a8086c87de59ebbe80d5/ruff-0.12.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aed9d15f8c5755c0e74467731a007fcad41f19bcce41cd75f768bbd687f8535f", size = 12527042, upload-time = "2025-08-14T16:08:16.54Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/45/cfcdf6d3eb5fc78a5b419e7e616d6ccba0013dc5b180522920af2897e1be/ruff-0.12.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5b15ea354c6ff0d7423814ba6d44be2807644d0c05e9ed60caca87e963e93f70", size = 11724457, upload-time = "2025-08-14T16:08:18.686Z" }, + { url = "https://files.pythonhosted.org/packages/72/e6/44615c754b55662200c48bebb02196dbb14111b6e266ab071b7e7297b4ec/ruff-0.12.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d596c2d0393c2502eaabfef723bd74ca35348a8dac4267d18a94910087807c53", size = 11949446, upload-time = "2025-08-14T16:08:21.059Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d1/9b7d46625d617c7df520d40d5ac6cdcdf20cbccb88fad4b5ecd476a6bb8d/ruff-0.12.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b15599931a1a7a03c388b9c5df1bfa62be7ede6eb7ef753b272381f39c3d0ff", size = 11566350, upload-time = "2025-08-14T16:08:23.433Z" }, + { url = "https://files.pythonhosted.org/packages/59/20/b73132f66f2856bc29d2d263c6ca457f8476b0bbbe064dac3ac3337a270f/ruff-0.12.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d02faa2977fb6f3f32ddb7828e212b7dd499c59eb896ae6c03ea5c303575756", size = 13270430, upload-time = "2025-08-14T16:08:25.837Z" }, + { url = "https://files.pythonhosted.org/packages/a2/21/eaf3806f0a3d4c6be0a69d435646fba775b65f3f2097d54898b0fd4bb12e/ruff-0.12.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:17d5b6b0b3a25259b69ebcba87908496e6830e03acfb929ef9fd4c58675fa2ea", size = 14264717, upload-time = "2025-08-14T16:08:27.907Z" }, + { url = "https://files.pythonhosted.org/packages/d2/82/1d0c53bd37dcb582b2c521d352fbf4876b1e28bc0d8894344198f6c9950d/ruff-0.12.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72db7521860e246adbb43f6ef464dd2a532ef2ef1f5dd0d470455b8d9f1773e0", size = 13684331, upload-time = "2025-08-14T16:08:30.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/2f/1c5cf6d8f656306d42a686f1e207f71d7cebdcbe7b2aa18e4e8a0cb74da3/ruff-0.12.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a03242c1522b4e0885af63320ad754d53983c9599157ee33e77d748363c561ce", size = 12739151, upload-time = "2025-08-14T16:08:32.55Z" }, + { url = "https://files.pythonhosted.org/packages/47/09/25033198bff89b24d734e6479e39b1968e4c992e82262d61cdccaf11afb9/ruff-0.12.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fc83e4e9751e6c13b5046d7162f205d0a7bac5840183c5beebf824b08a27340", size = 12954992, upload-time = "2025-08-14T16:08:34.816Z" }, + { url = "https://files.pythonhosted.org/packages/52/8e/d0dbf2f9dca66c2d7131feefc386523404014968cd6d22f057763935ab32/ruff-0.12.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:881465ed56ba4dd26a691954650de6ad389a2d1fdb130fe51ff18a25639fe4bb", size = 12899569, upload-time = "2025-08-14T16:08:36.852Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b614d7c08515b1428ed4d3f1d4e3d687deffb2479703b90237682586fa66/ruff-0.12.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:43f07a3ccfc62cdb4d3a3348bf0588358a66da756aa113e071b8ca8c3b9826af", size = 11751983, upload-time = "2025-08-14T16:08:39.314Z" }, + { url = "https://files.pythonhosted.org/packages/58/d6/383e9f818a2441b1a0ed898d7875f11273f10882f997388b2b51cb2ae8b5/ruff-0.12.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:07adb221c54b6bba24387911e5734357f042e5669fa5718920ee728aba3cbadc", size = 11538635, upload-time = "2025-08-14T16:08:41.297Z" }, + { url = "https://files.pythonhosted.org/packages/20/9c/56f869d314edaa9fc1f491706d1d8a47747b9d714130368fbd69ce9024e9/ruff-0.12.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f5cd34fabfdea3933ab85d72359f118035882a01bff15bd1d2b15261d85d5f66", size = 12534346, upload-time = "2025-08-14T16:08:43.39Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/4b/d8b95c6795a6c93b439bc913ee7a94fda42bb30a79285d47b80074003ee7/ruff-0.12.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f6be1d2ca0686c54564da8e7ee9e25f93bdd6868263805f8c0b8fc6a449db6d7", size = 13017021, upload-time = "2025-08-14T16:08:45.889Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c1/5f9a839a697ce1acd7af44836f7c2181cdae5accd17a5cb85fcbd694075e/ruff-0.12.9-py3-none-win32.whl", hash = "sha256:cc7a37bd2509974379d0115cc5608a1a4a6c4bff1b452ea69db83c8855d53f93", size = 11734785, upload-time = "2025-08-14T16:08:48.062Z" }, + { url = "https://files.pythonhosted.org/packages/fa/66/cdddc2d1d9a9f677520b7cfc490d234336f523d4b429c1298de359a3be08/ruff-0.12.9-py3-none-win_amd64.whl", hash = "sha256:6fb15b1977309741d7d098c8a3cb7a30bc112760a00fb6efb7abc85f00ba5908", size = 12840654, upload-time = "2025-08-14T16:08:50.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/fd/669816bc6b5b93b9586f3c1d87cd6bc05028470b3ecfebb5938252c47a35/ruff-0.12.9-py3-none-win_arm64.whl", hash = "sha256:63c8c819739d86b96d500cce885956a1a48ab056bbcbc61b747ad494b2485089", size = 11949623, upload-time = "2025-08-14T16:08:52.233Z" }, ] [[package]] @@ -2435,10 +2666,10 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "scipy", version = "1.15.3", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "scipy", version = "1.16.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.16.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c5/16/7fe5a9ee6e5c8fddce3a7309a7fa5186df10417676779c24a02f650f7e3b/ruptures-1.1.9.tar.gz", hash = "sha256:aa940f3c02235dab94753ff15689f8ebaca10c83da71cb29cbb7f981dfa362dc", size = 308115, upload-time = "2023-12-11T10:04:44.565Z" } wheels = [ @@ -2559,53 +2790,71 @@ wheels = [ [[package]] name = "scipy" -version = "1.16.0" +version = "1.16.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12'", "python_full_version == '3.11.*'", ] dependencies = [ - { name = "numpy", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/81/18/b06a83f0c5ee8cddbde5e3f3d0bb9b702abfa5136ef6d4620ff67df7eee5/scipy-1.16.0.tar.gz", hash = "sha256:b5ef54021e832869c8cfb03bc3bf20366cbcd426e02a58e8a58d7584dfbb8f62", size = 30581216, upload-time = "2025-06-22T16:27:55.782Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/f8/53fc4884df6b88afd5f5f00240bdc49fee2999c7eff3acf5953eb15bc6f8/scipy-1.16.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:deec06d831b8f6b5fb0b652433be6a09db29e996368ce5911faf673e78d20085", size = 36447362, upload-time = "2025-06-22T16:18:17.817Z" }, - { url = "https://files.pythonhosted.org/packages/c9/25/fad8aa228fa828705142a275fc593d701b1817c98361a2d6b526167d07bc/scipy-1.16.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d30c0fe579bb901c61ab4bb7f3eeb7281f0d4c4a7b52dbf563c89da4fd2949be", size = 28547120, upload-time = "2025-06-22T16:18:24.117Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/be/d324ddf6b89fd1c32fecc307f04d095ce84abb52d2e88fab29d0cd8dc7a8/scipy-1.16.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:b2243561b45257f7391d0f49972fca90d46b79b8dbcb9b2cb0f9df928d370ad4", size = 20818922, upload-time = "2025-06-22T16:18:28.035Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e0/cf3f39e399ac83fd0f3ba81ccc5438baba7cfe02176be0da55ff3396f126/scipy-1.16.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:e6d7dfc148135e9712d87c5f7e4f2ddc1304d1582cb3a7d698bbadedb61c7afd", size = 23409695, upload-time = "2025-06-22T16:18:32.497Z" }, - { url = "https://files.pythonhosted.org/packages/5b/61/d92714489c511d3ffd6830ac0eb7f74f243679119eed8b9048e56b9525a1/scipy-1.16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:90452f6a9f3fe5a2cf3748e7be14f9cc7d9b124dce19667b54f5b429d680d539", size = 33444586, upload-time = "2025-06-22T16:18:37.992Z" }, - { url = "https://files.pythonhosted.org/packages/af/2c/40108915fd340c830aee332bb85a9160f99e90893e58008b659b9f3dddc0/scipy-1.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a2f0bf2f58031c8701a8b601df41701d2a7be17c7ffac0a4816aeba89c4cdac8", size = 35284126, upload-time = "2025-06-22T16:18:43.605Z" }, - { url = "https://files.pythonhosted.org/packages/d3/30/e9eb0ad3d0858df35d6c703cba0a7e16a18a56a9e6b211d861fc6f261c5f/scipy-1.16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c4abb4c11fc0b857474241b812ce69ffa6464b4bd8f4ecb786cf240367a36a7", size = 35608257, upload-time = "2025-06-22T16:18:49.09Z" }, - { url = "https://files.pythonhosted.org/packages/c8/ff/950ee3e0d612b375110d8cda211c1f787764b4c75e418a4b71f4a5b1e07f/scipy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b370f8f6ac6ef99815b0d5c9f02e7ade77b33007d74802efc8316c8db98fd11e", size = 38040541, upload-time = "2025-06-22T16:18:55.077Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/c9/750d34788288d64ffbc94fdb4562f40f609d3f5ef27ab4f3a4ad00c9033e/scipy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:a16ba90847249bedce8aa404a83fb8334b825ec4a8e742ce6012a7a5e639f95c", size = 38570814, upload-time = "2025-06-22T16:19:00.912Z" }, - { url = "https://files.pythonhosted.org/packages/01/c0/c943bc8d2bbd28123ad0f4f1eef62525fa1723e84d136b32965dcb6bad3a/scipy-1.16.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:7eb6bd33cef4afb9fa5f1fb25df8feeb1e52d94f21a44f1d17805b41b1da3180", size = 36459071, upload-time = "2025-06-22T16:19:06.605Z" }, - { url = "https://files.pythonhosted.org/packages/99/0d/270e2e9f1a4db6ffbf84c9a0b648499842046e4e0d9b2275d150711b3aba/scipy-1.16.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1dbc8fdba23e4d80394ddfab7a56808e3e6489176d559c6c71935b11a2d59db1", size = 28490500, upload-time = "2025-06-22T16:19:11.775Z" }, - { url = "https://files.pythonhosted.org/packages/1c/22/01d7ddb07cff937d4326198ec8d10831367a708c3da72dfd9b7ceaf13028/scipy-1.16.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:7dcf42c380e1e3737b343dec21095c9a9ad3f9cbe06f9c05830b44b1786c9e90", size = 20762345, upload-time = "2025-06-22T16:19:15.813Z" }, - { url = "https://files.pythonhosted.org/packages/34/7f/87fd69856569ccdd2a5873fe5d7b5bbf2ad9289d7311d6a3605ebde3a94b/scipy-1.16.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26ec28675f4a9d41587266084c626b02899db373717d9312fa96ab17ca1ae94d", size = 23418563, upload-time = "2025-06-22T16:19:20.746Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f1/e4f4324fef7f54160ab749efbab6a4bf43678a9eb2e9817ed71a0a2fd8de/scipy-1.16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:952358b7e58bd3197cfbd2f2f2ba829f258404bdf5db59514b515a8fe7a36c52", size = 33203951, upload-time = "2025-06-22T16:19:25.813Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/f0/b6ac354a956384fd8abee2debbb624648125b298f2c4a7b4f0d6248048a5/scipy-1.16.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03931b4e870c6fef5b5c0970d52c9f6ddd8c8d3e934a98f09308377eba6f3824", size = 35070225, upload-time = "2025-06-22T16:19:31.416Z" }, - { url = "https://files.pythonhosted.org/packages/e5/73/5cbe4a3fd4bc3e2d67ffad02c88b83edc88f381b73ab982f48f3df1a7790/scipy-1.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:512c4f4f85912767c351a0306824ccca6fd91307a9f4318efe8fdbd9d30562ef", size = 35389070, upload-time = "2025-06-22T16:19:37.387Z" }, - { url = "https://files.pythonhosted.org/packages/86/e8/a60da80ab9ed68b31ea5a9c6dfd3c2f199347429f229bf7f939a90d96383/scipy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e69f798847e9add03d512eaf5081a9a5c9a98757d12e52e6186ed9681247a1ac", size = 37825287, upload-time = "2025-06-22T16:19:43.375Z" }, - { url = "https://files.pythonhosted.org/packages/ea/b5/29fece1a74c6a94247f8a6fb93f5b28b533338e9c34fdcc9cfe7a939a767/scipy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:adf9b1999323ba335adc5d1dc7add4781cb5a4b0ef1e98b79768c05c796c4e49", size = 38431929, upload-time = "2025-06-22T16:19:49.385Z" }, - { url = "https://files.pythonhosted.org/packages/46/95/0746417bc24be0c2a7b7563946d61f670a3b491b76adede420e9d173841f/scipy-1.16.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:e9f414cbe9ca289a73e0cc92e33a6a791469b6619c240aa32ee18abdce8ab451", size = 36418162, upload-time = "2025-06-22T16:19:56.3Z" }, - { url = "https://files.pythonhosted.org/packages/19/5a/914355a74481b8e4bbccf67259bbde171348a3f160b67b4945fbc5f5c1e5/scipy-1.16.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:bbba55fb97ba3cdef9b1ee973f06b09d518c0c7c66a009c729c7d1592be1935e", size = 28465985, upload-time = "2025-06-22T16:20:01.238Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/46/63477fc1246063855969cbefdcee8c648ba4b17f67370bd542ba56368d0b/scipy-1.16.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:58e0d4354eacb6004e7aa1cd350e5514bd0270acaa8d5b36c0627bb3bb486974", size = 20737961, upload-time = "2025-06-22T16:20:05.913Z" }, - { url = "https://files.pythonhosted.org/packages/93/86/0fbb5588b73555e40f9d3d6dde24ee6fac7d8e301a27f6f0cab9d8f66ff2/scipy-1.16.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:75b2094ec975c80efc273567436e16bb794660509c12c6a31eb5c195cbf4b6dc", size = 23377941, upload-time = "2025-06-22T16:20:10.668Z" }, - { url = "https://files.pythonhosted.org/packages/ca/80/a561f2bf4c2da89fa631b3cbf31d120e21ea95db71fd9ec00cb0247c7a93/scipy-1.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b65d232157a380fdd11a560e7e21cde34fdb69d65c09cb87f6cc024ee376351", size = 33196703, upload-time = "2025-06-22T16:20:16.097Z" }, - { url = "https://files.pythonhosted.org/packages/11/6b/3443abcd0707d52e48eb315e33cc669a95e29fc102229919646f5a501171/scipy-1.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d8747f7736accd39289943f7fe53a8333be7f15a82eea08e4afe47d79568c32", size = 35083410, upload-time = "2025-06-22T16:20:21.734Z" }, - { url = "https://files.pythonhosted.org/packages/20/ab/eb0fc00e1e48961f1bd69b7ad7e7266896fe5bad4ead91b5fc6b3561bba4/scipy-1.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eb9f147a1b8529bb7fec2a85cf4cf42bdfadf9e83535c309a11fdae598c88e8b", size = 35387829, upload-time = "2025-06-22T16:20:27.548Z" }, - { url = "https://files.pythonhosted.org/packages/57/9e/d6fc64e41fad5d481c029ee5a49eefc17f0b8071d636a02ceee44d4a0de2/scipy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d2b83c37edbfa837a8923d19c749c1935ad3d41cf196006a24ed44dba2ec4358", size = 37841356, upload-time = "2025-06-22T16:20:35.112Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/a7/4c94bbe91f12126b8bf6709b2471900577b7373a4fd1f431f28ba6f81115/scipy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:79a3c13d43c95aa80b87328a46031cf52508cf5f4df2767602c984ed1d3c6bbe", size = 38403710, upload-time = "2025-06-22T16:21:54.473Z" }, - { url = "https://files.pythonhosted.org/packages/47/20/965da8497f6226e8fa90ad3447b82ed0e28d942532e92dd8b91b43f100d4/scipy-1.16.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:f91b87e1689f0370690e8470916fe1b2308e5b2061317ff76977c8f836452a47", size = 36813833, upload-time = "2025-06-22T16:20:43.925Z" }, - { url = "https://files.pythonhosted.org/packages/28/f4/197580c3dac2d234e948806e164601c2df6f0078ed9f5ad4a62685b7c331/scipy-1.16.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:88a6ca658fb94640079e7a50b2ad3b67e33ef0f40e70bdb7dc22017dae73ac08", size = 28974431, upload-time = "2025-06-22T16:20:51.302Z" }, - { url = "https://files.pythonhosted.org/packages/8a/fc/e18b8550048d9224426e76906694c60028dbdb65d28b1372b5503914b89d/scipy-1.16.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ae902626972f1bd7e4e86f58fd72322d7f4ec7b0cfc17b15d4b7006efc385176", size = 21246454, upload-time = "2025-06-22T16:20:57.276Z" }, - { url = "https://files.pythonhosted.org/packages/8c/48/07b97d167e0d6a324bfd7484cd0c209cc27338b67e5deadae578cf48e809/scipy-1.16.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:8cb824c1fc75ef29893bc32b3ddd7b11cf9ab13c1127fe26413a05953b8c32ed", size = 23772979, upload-time = "2025-06-22T16:21:03.363Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4f/9efbd3f70baf9582edf271db3002b7882c875ddd37dc97f0f675ad68679f/scipy-1.16.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:de2db7250ff6514366a9709c2cba35cb6d08498e961cba20d7cff98a7ee88938", size = 33341972, upload-time = "2025-06-22T16:21:11.14Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/dc/9e496a3c5dbe24e76ee24525155ab7f659c20180bab058ef2c5fa7d9119c/scipy-1.16.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e85800274edf4db8dd2e4e93034f92d1b05c9421220e7ded9988b16976f849c1", size = 35185476, upload-time = "2025-06-22T16:21:19.156Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b3/21001cff985a122ba434c33f2c9d7d1dc3b669827e94f4fc4e1fe8b9dfd8/scipy-1.16.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4f720300a3024c237ace1cb11f9a84c38beb19616ba7c4cdcd771047a10a1706", size = 35570990, upload-time = "2025-06-22T16:21:27.797Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d3/7ba42647d6709251cdf97043d0c107e0317e152fa2f76873b656b509ff55/scipy-1.16.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aad603e9339ddb676409b104c48a027e9916ce0d2838830691f39552b38a352e", size = 37950262, upload-time = "2025-06-22T16:21:36.976Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c4/231cac7a8385394ebbbb4f1ca662203e9d8c332825ab4f36ffc3ead09a42/scipy-1.16.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f56296fefca67ba605fd74d12f7bd23636267731a72cb3947963e76b8c0a25db", size = 38515076, upload-time = "2025-06-22T16:21:45.694Z" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/4a/b927028464795439faec8eaf0b03b011005c487bb2d07409f28bf30879c4/scipy-1.16.1.tar.gz", hash = "sha256:44c76f9e8b6e8e488a586190ab38016e4ed2f8a038af7cd3defa903c0a2238b3", size = 30580861, upload-time = "2025-07-27T16:33:30.834Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/91/812adc6f74409b461e3a5fa97f4f74c769016919203138a3bf6fc24ba4c5/scipy-1.16.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c033fa32bab91dc98ca59d0cf23bb876454e2bb02cbe592d5023138778f70030", size = 36552519, upload-time = 
"2025-07-27T16:26:29.658Z" }, + { url = "https://files.pythonhosted.org/packages/47/18/8e355edcf3b71418d9e9f9acd2708cc3a6c27e8f98fde0ac34b8a0b45407/scipy-1.16.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6e5c2f74e5df33479b5cd4e97a9104c511518fbd979aa9b8f6aec18b2e9ecae7", size = 28638010, upload-time = "2025-07-27T16:26:38.196Z" }, + { url = "https://files.pythonhosted.org/packages/d9/eb/e931853058607bdfbc11b86df19ae7a08686121c203483f62f1ecae5989c/scipy-1.16.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0a55ffe0ba0f59666e90951971a884d1ff6f4ec3275a48f472cfb64175570f77", size = 20909790, upload-time = "2025-07-27T16:26:43.93Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/be83a271d6e96750cd0be2e000f35ff18880a46f05ce8b5d3465dc0f7a2a/scipy-1.16.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f8a5d6cd147acecc2603fbd382fed6c46f474cccfcf69ea32582e033fb54dcfe", size = 23513352, upload-time = "2025-07-27T16:26:50.017Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bf/fe6eb47e74f762f933cca962db7f2c7183acfdc4483bd1c3813cfe83e538/scipy-1.16.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb18899127278058bcc09e7b9966d41a5a43740b5bb8dcba401bd983f82e885b", size = 33534643, upload-time = "2025-07-27T16:26:57.503Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ba/63f402e74875486b87ec6506a4f93f6d8a0d94d10467280f3d9d7837ce3a/scipy-1.16.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adccd93a2fa937a27aae826d33e3bfa5edf9aa672376a4852d23a7cd67a2e5b7", size = 35376776, upload-time = "2025-07-27T16:27:06.639Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b4/04eb9d39ec26a1b939689102da23d505ea16cdae3dbb18ffc53d1f831044/scipy-1.16.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:18aca1646a29ee9a0625a1be5637fa798d4d81fdf426481f06d69af828f16958", size = 35698906, upload-time = "2025-07-27T16:27:14.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/d6/bb5468da53321baeb001f6e4e0d9049eadd175a4a497709939128556e3ec/scipy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d85495cef541729a70cdddbbf3e6b903421bc1af3e8e3a9a72a06751f33b7c39", size = 38129275, upload-time = "2025-07-27T16:27:23.873Z" }, + { url = "https://files.pythonhosted.org/packages/c4/94/994369978509f227cba7dfb9e623254d0d5559506fe994aef4bea3ed469c/scipy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:226652fca853008119c03a8ce71ffe1b3f6d2844cc1686e8f9806edafae68596", size = 38644572, upload-time = "2025-07-27T16:27:32.637Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d9/ec4864f5896232133f51382b54a08de91a9d1af7a76dfa372894026dfee2/scipy-1.16.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81b433bbeaf35728dad619afc002db9b189e45eebe2cd676effe1fb93fef2b9c", size = 36575194, upload-time = "2025-07-27T16:27:41.321Z" }, + { url = "https://files.pythonhosted.org/packages/5c/6d/40e81ecfb688e9d25d34a847dca361982a6addf8e31f0957b1a54fbfa994/scipy-1.16.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:886cc81fdb4c6903a3bb0464047c25a6d1016fef77bb97949817d0c0d79f9e04", size = 28594590, upload-time = "2025-07-27T16:27:49.204Z" }, + { url = "https://files.pythonhosted.org/packages/0e/37/9f65178edfcc629377ce9a64fc09baebea18c80a9e57ae09a52edf84880b/scipy-1.16.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:15240c3aac087a522b4eaedb09f0ad061753c5eebf1ea430859e5bf8640d5919", size = 20866458, upload-time = "2025-07-27T16:27:54.98Z" }, + { url = "https://files.pythonhosted.org/packages/2c/7b/749a66766871ea4cb1d1ea10f27004db63023074c22abed51f22f09770e0/scipy-1.16.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:65f81a25805f3659b48126b5053d9e823d3215e4a63730b5e1671852a1705921", size = 23539318, upload-time = "2025-07-27T16:28:01.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/db/8d4afec60eb833a666434d4541a3151eedbf2494ea6d4d468cbe877f00cd/scipy-1.16.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c62eea7f607f122069b9bad3f99489ddca1a5173bef8a0c75555d7488b6f725", size = 33292899, upload-time = "2025-07-27T16:28:09.147Z" }, + { url = "https://files.pythonhosted.org/packages/51/1e/79023ca3bbb13a015d7d2757ecca3b81293c663694c35d6541b4dca53e98/scipy-1.16.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f965bbf3235b01c776115ab18f092a95aa74c271a52577bcb0563e85738fd618", size = 35162637, upload-time = "2025-07-27T16:28:17.535Z" }, + { url = "https://files.pythonhosted.org/packages/b6/49/0648665f9c29fdaca4c679182eb972935b3b4f5ace41d323c32352f29816/scipy-1.16.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f006e323874ffd0b0b816d8c6a8e7f9a73d55ab3b8c3f72b752b226d0e3ac83d", size = 35490507, upload-time = "2025-07-27T16:28:25.705Z" }, + { url = "https://files.pythonhosted.org/packages/62/8f/66cbb9d6bbb18d8c658f774904f42a92078707a7c71e5347e8bf2f52bb89/scipy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8fd15fc5085ab4cca74cb91fe0a4263b1f32e4420761ddae531ad60934c2119", size = 37923998, upload-time = "2025-07-27T16:28:34.339Z" }, + { url = "https://files.pythonhosted.org/packages/14/c3/61f273ae550fbf1667675701112e380881905e28448c080b23b5a181df7c/scipy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:f7b8013c6c066609577d910d1a2a077021727af07b6fab0ee22c2f901f22352a", size = 38508060, upload-time = "2025-07-27T16:28:43.242Z" }, + { url = "https://files.pythonhosted.org/packages/93/0b/b5c99382b839854a71ca9482c684e3472badc62620287cbbdab499b75ce6/scipy-1.16.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5451606823a5e73dfa621a89948096c6528e2896e40b39248295d3a0138d594f", size = 36533717, upload-time = "2025-07-27T16:28:51.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/e5/69ab2771062c91e23e07c12e7d5033a6b9b80b0903ee709c3c36b3eb520c/scipy-1.16.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:89728678c5ca5abd610aee148c199ac1afb16e19844401ca97d43dc548a354eb", size = 28570009, upload-time = "2025-07-27T16:28:57.017Z" }, + { url = "https://files.pythonhosted.org/packages/f4/69/bd75dbfdd3cf524f4d753484d723594aed62cfaac510123e91a6686d520b/scipy-1.16.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e756d688cb03fd07de0fffad475649b03cb89bee696c98ce508b17c11a03f95c", size = 20841942, upload-time = "2025-07-27T16:29:01.152Z" }, + { url = "https://files.pythonhosted.org/packages/ea/74/add181c87663f178ba7d6144b370243a87af8476664d5435e57d599e6874/scipy-1.16.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5aa2687b9935da3ed89c5dbed5234576589dd28d0bf7cd237501ccfbdf1ad608", size = 23498507, upload-time = "2025-07-27T16:29:05.202Z" }, + { url = "https://files.pythonhosted.org/packages/1d/74/ece2e582a0d9550cee33e2e416cc96737dce423a994d12bbe59716f47ff1/scipy-1.16.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0851f6a1e537fe9399f35986897e395a1aa61c574b178c0d456be5b1a0f5ca1f", size = 33286040, upload-time = "2025-07-27T16:29:10.201Z" }, + { url = "https://files.pythonhosted.org/packages/e4/82/08e4076df538fb56caa1d489588d880ec7c52d8273a606bb54d660528f7c/scipy-1.16.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fedc2cbd1baed37474b1924c331b97bdff611d762c196fac1a9b71e67b813b1b", size = 35176096, upload-time = "2025-07-27T16:29:17.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/79/cd710aab8c921375711a8321c6be696e705a120e3011a643efbbcdeeabcc/scipy-1.16.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2ef500e72f9623a6735769e4b93e9dcb158d40752cdbb077f305487e3e2d1f45", size = 35490328, upload-time = "2025-07-27T16:29:22.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/73/e9cc3d35ee4526d784520d4494a3e1ca969b071fb5ae5910c036a375ceec/scipy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:978d8311674b05a8f7ff2ea6c6bce5d8b45a0cb09d4c5793e0318f448613ea65", size = 37939921, upload-time = "2025-07-27T16:29:29.108Z" }, + { url = "https://files.pythonhosted.org/packages/21/12/c0efd2941f01940119b5305c375ae5c0fcb7ec193f806bd8f158b73a1782/scipy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:81929ed0fa7a5713fcdd8b2e6f73697d3b4c4816d090dd34ff937c20fa90e8ab", size = 38479462, upload-time = "2025-07-27T16:30:24.078Z" }, + { url = "https://files.pythonhosted.org/packages/7a/19/c3d08b675260046a991040e1ea5d65f91f40c7df1045fffff412dcfc6765/scipy-1.16.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:bcc12db731858abda693cecdb3bdc9e6d4bd200213f49d224fe22df82687bdd6", size = 36938832, upload-time = "2025-07-27T16:29:35.057Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/ce53db652c033a414a5b34598dba6b95f3d38153a2417c5a3883da429029/scipy-1.16.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:744d977daa4becb9fc59135e75c069f8d301a87d64f88f1e602a9ecf51e77b27", size = 29093084, upload-time = "2025-07-27T16:29:40.201Z" }, + { url = "https://files.pythonhosted.org/packages/a9/ae/7a10ff04a7dc15f9057d05b33737ade244e4bd195caa3f7cc04d77b9e214/scipy-1.16.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:dc54f76ac18073bcecffb98d93f03ed6b81a92ef91b5d3b135dcc81d55a724c7", size = 21365098, upload-time = "2025-07-27T16:29:44.295Z" }, + { url = "https://files.pythonhosted.org/packages/36/ac/029ff710959932ad3c2a98721b20b405f05f752f07344622fd61a47c5197/scipy-1.16.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:367d567ee9fc1e9e2047d31f39d9d6a7a04e0710c86e701e053f237d14a9b4f6", size = 23896858, upload-time = "2025-07-27T16:29:48.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/13/d1ef77b6bd7898720e1f0b6b3743cb945f6c3cafa7718eaac8841035ab60/scipy-1.16.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4cf5785e44e19dcd32a0e4807555e1e9a9b8d475c6afff3d21c3c543a6aa84f4", size = 33438311, upload-time = "2025-07-27T16:29:54.164Z" }, + { url = "https://files.pythonhosted.org/packages/2d/e0/e64a6821ffbb00b4c5b05169f1c1fddb4800e9307efe3db3788995a82a2c/scipy-1.16.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3d0b80fb26d3e13a794c71d4b837e2a589d839fd574a6bbb4ee1288c213ad4a3", size = 35279542, upload-time = "2025-07-27T16:30:00.249Z" }, + { url = "https://files.pythonhosted.org/packages/57/59/0dc3c8b43e118f1e4ee2b798dcc96ac21bb20014e5f1f7a8e85cc0653bdb/scipy-1.16.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8503517c44c18d1030d666cb70aaac1cc8913608816e06742498833b128488b7", size = 35667665, upload-time = "2025-07-27T16:30:05.916Z" }, + { url = "https://files.pythonhosted.org/packages/45/5f/844ee26e34e2f3f9f8febb9343748e72daeaec64fe0c70e9bf1ff84ec955/scipy-1.16.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:30cc4bb81c41831ecfd6dc450baf48ffd80ef5aed0f5cf3ea775740e80f16ecc", size = 38045210, upload-time = "2025-07-27T16:30:11.655Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d7/210f2b45290f444f1de64bc7353aa598ece9f0e90c384b4a156f9b1a5063/scipy-1.16.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c24fa02f7ed23ae514460a22c57eca8f530dbfa50b1cfdbf4f37c05b5309cc39", size = 38593661, upload-time = "2025-07-27T16:30:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/81/ea/84d481a5237ed223bd3d32d6e82d7a6a96e34756492666c260cef16011d1/scipy-1.16.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:796a5a9ad36fa3a782375db8f4241ab02a091308eb079746bc0f874c9b998318", size = 36525921, upload-time = "2025-07-27T16:30:30.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/9f/d9edbdeff9f3a664807ae3aea383e10afaa247e8e6255e6d2aa4515e8863/scipy-1.16.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:3ea0733a2ff73fd6fdc5fecca54ee9b459f4d74f00b99aced7d9a3adb43fb1cc", size = 28564152, upload-time = "2025-07-27T16:30:35.336Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/8125bcb1fe04bc267d103e76516243e8d5e11229e6b306bda1024a5423d1/scipy-1.16.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:85764fb15a2ad994e708258bb4ed8290d1305c62a4e1ef07c414356a24fcfbf8", size = 20836028, upload-time = "2025-07-27T16:30:39.421Z" }, + { url = "https://files.pythonhosted.org/packages/77/9c/bf92e215701fc70bbcd3d14d86337cf56a9b912a804b9c776a269524a9e9/scipy-1.16.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:ca66d980469cb623b1759bdd6e9fd97d4e33a9fad5b33771ced24d0cb24df67e", size = 23489666, upload-time = "2025-07-27T16:30:43.663Z" }, + { url = "https://files.pythonhosted.org/packages/5e/00/5e941d397d9adac41b02839011594620d54d99488d1be5be755c00cde9ee/scipy-1.16.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e7cc1ffcc230f568549fc56670bcf3df1884c30bd652c5da8138199c8c76dae0", size = 33358318, upload-time = "2025-07-27T16:30:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/0e/87/8db3aa10dde6e3e8e7eb0133f24baa011377d543f5b19c71469cf2648026/scipy-1.16.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ddfb1e8d0b540cb4ee9c53fc3dea3186f97711248fb94b4142a1b27178d8b4b", size = 35185724, upload-time = "2025-07-27T16:30:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/89/b4/6ab9ae443216807622bcff02690262d8184078ea467efee2f8c93288a3b1/scipy-1.16.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4dc0e7be79e95d8ba3435d193e0d8ce372f47f774cffd882f88ea4e1e1ddc731", size = 35554335, upload-time = "2025-07-27T16:30:59.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/9a/d0e9dc03c5269a1afb60661118296a32ed5d2c24298af61b676c11e05e56/scipy-1.16.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f23634f9e5adb51b2a77766dac217063e764337fbc816aa8ad9aaebcd4397fd3", size = 37960310, upload-time = "2025-07-27T16:31:06.151Z" }, + { url = "https://files.pythonhosted.org/packages/5e/00/c8f3130a50521a7977874817ca89e0599b1b4ee8e938bad8ae798a0e1f0d/scipy-1.16.1-cp314-cp314-win_amd64.whl", hash = "sha256:57d75524cb1c5a374958a2eae3d84e1929bb971204cc9d52213fb8589183fc19", size = 39319239, upload-time = "2025-07-27T16:31:59.942Z" }, + { url = "https://files.pythonhosted.org/packages/f2/f2/1ca3eda54c3a7e4c92f6acef7db7b3a057deb135540d23aa6343ef8ad333/scipy-1.16.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:d8da7c3dd67bcd93f15618938f43ed0995982eb38973023d46d4646c4283ad65", size = 36939460, upload-time = "2025-07-27T16:31:11.865Z" }, + { url = "https://files.pythonhosted.org/packages/80/30/98c2840b293a132400c0940bb9e140171dcb8189588619048f42b2ce7b4f/scipy-1.16.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:cc1d2f2fd48ba1e0620554fe5bc44d3e8f5d4185c8c109c7fbdf5af2792cfad2", size = 29093322, upload-time = "2025-07-27T16:31:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/c1/e6/1e6e006e850622cf2a039b62d1a6ddc4497d4851e58b68008526f04a9a00/scipy-1.16.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:21a611ced9275cb861bacadbada0b8c0623bc00b05b09eb97f23b370fc2ae56d", size = 21365329, upload-time = "2025-07-27T16:31:21.188Z" }, + { url = "https://files.pythonhosted.org/packages/8e/02/72a5aa5b820589dda9a25e329ca752842bfbbaf635e36bc7065a9b42216e/scipy-1.16.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dfbb25dffc4c3dd9371d8ab456ca81beeaf6f9e1c2119f179392f0dc1ab7695", size = 23897544, upload-time = "2025-07-27T16:31:25.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/dc/7122d806a6f9eb8a33532982234bed91f90272e990f414f2830cfe656e0b/scipy-1.16.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f0ebb7204f063fad87fc0a0e4ff4a2ff40b2a226e4ba1b7e34bf4b79bf97cd86", size = 33442112, upload-time = "2025-07-27T16:31:30.62Z" }, + { url = "https://files.pythonhosted.org/packages/24/39/e383af23564daa1021a5b3afbe0d8d6a68ec639b943661841f44ac92de85/scipy-1.16.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f1b9e5962656f2734c2b285a8745358ecb4e4efbadd00208c80a389227ec61ff", size = 35286594, upload-time = "2025-07-27T16:31:36.112Z" }, + { url = "https://files.pythonhosted.org/packages/95/47/1a0b0aff40c3056d955f38b0df5d178350c3d74734ec54f9c68d23910be5/scipy-1.16.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e1a106f8c023d57a2a903e771228bf5c5b27b5d692088f457acacd3b54511e4", size = 35665080, upload-time = "2025-07-27T16:31:42.025Z" }, + { url = "https://files.pythonhosted.org/packages/64/df/ce88803e9ed6e27fe9b9abefa157cf2c80e4fa527cf17ee14be41f790ad4/scipy-1.16.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:709559a1db68a9abc3b2c8672c4badf1614f3b440b3ab326d86a5c0491eafae3", size = 38050306, upload-time = "2025-07-27T16:31:48.109Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6c/a76329897a7cae4937d403e623aa6aaea616a0bb5b36588f0b9d1c9a3739/scipy-1.16.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c0c804d60492a0aad7f5b2bb1862f4548b990049e27e828391ff2bf6f7199998", size = 39427705, upload-time = "2025-07-27T16:31:53.96Z" }, ] [[package]] @@ -2675,44 +2924,44 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.9.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = 
"sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/f3/50ec5709fad61641e4411eb1b9ac55b99801d71f1993c29853f256c726c9/tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382", size = 1065770, upload-time = "2025-02-14T06:02:01.251Z" }, - { url = "https://files.pythonhosted.org/packages/d6/f8/5a9560a422cf1755b6e0a9a436e14090eeb878d8ec0f80e0cd3d45b78bf4/tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108", size = 1009314, upload-time = "2025-02-14T06:02:02.869Z" }, - { url = "https://files.pythonhosted.org/packages/bc/20/3ed4cfff8f809cb902900ae686069e029db74567ee10d017cb254df1d598/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd", size = 1143140, upload-time = "2025-02-14T06:02:04.165Z" }, - { url = "https://files.pythonhosted.org/packages/f1/95/cc2c6d79df8f113bdc6c99cdec985a878768120d87d839a34da4bd3ff90a/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de", size = 1197860, upload-time = "2025-02-14T06:02:06.268Z" }, - { url = "https://files.pythonhosted.org/packages/c7/6c/9c1a4cc51573e8867c9381db1814223c09ebb4716779c7f845d48688b9c8/tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990", size = 1259661, upload-time = "2025-02-14T06:02:08.889Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4c/22eb8e9856a2b1808d0a002d171e534eac03f96dbe1161978d7389a59498/tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4", 
size = 894026, upload-time = "2025-02-14T06:02:12.841Z" }, - { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, - { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, - { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, - { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = "2025-02-14T06:02:18.595Z" }, - { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, - { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, - { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, - { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, - { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, - { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, - { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919, upload-time = "2025-02-14T06:02:37.494Z" }, - { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877, upload-time = "2025-02-14T06:02:39.516Z" }, - { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095, upload-time = "2025-02-14T06:02:41.791Z" }, - { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649, upload-time = "2025-02-14T06:02:43Z" }, - { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465, upload-time = "2025-02-14T06:02:45.046Z" }, - { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669, upload-time = "2025-02-14T06:02:47.341Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/92/4d681b5c066d417b98f22a0176358d9e606e183c6b61c337d61fb54accb4/tiktoken-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c6386ca815e7d96ef5b4ac61e0048cd32ca5a92d5781255e13b31381d28667dc", size = 1066217, upload-time = "2025-02-14T06:02:49.259Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/af27bbe186df481666de48cf0f2f4e0643ba9c78b472e7bf70144c663b22/tiktoken-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75f6d5db5bc2c6274b674ceab1615c1778e6416b14705827d19b40e6355f03e0", size = 1009441, upload-time = "2025-02-14T06:02:51.347Z" }, - { url = "https://files.pythonhosted.org/packages/33/35/2792b7dcb8b150d2767322637513c73a3e80833c19212efea80b31087894/tiktoken-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e15b16f61e6f4625a57a36496d28dd182a8a60ec20a534c5343ba3cafa156ac7", size = 1144423, upload-time = "2025-02-14T06:02:52.547Z" }, - { url = "https://files.pythonhosted.org/packages/65/ae/4d1682510172ce3500bbed3b206ebc4efefe280f0bf1179cfb043f88cc16/tiktoken-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebcec91babf21297022882344c3f7d9eed855931466c3311b1ad6b64befb3df", size = 1199002, upload-time = "2025-02-14T06:02:55.72Z" }, - { url = "https://files.pythonhosted.org/packages/1c/2e/df2dc31dd161190f315829775a9652ea01d60f307af8f98e35bdd14a6a93/tiktoken-0.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5fd49e7799579240f03913447c0cdfa1129625ebd5ac440787afc4345990427", size = 1260610, upload-time = "2025-02-14T06:02:56.924Z" }, - { url = "https://files.pythonhosted.org/packages/70/22/e8fc1bf9cdecc439b7ddc28a45b976a8c699a38874c070749d855696368a/tiktoken-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:26242ca9dc8b58e875ff4ca078b9a94d2f0813e6a535dcd2205df5d49d927cc7", size = 894215, upload-time = "2025-02-14T06:02:59.031Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, + { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, + { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, + { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, + { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, + { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, + { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, + { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, + { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, + { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, + { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, + { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b6/81c5799ab77a9580c6d840cf77d4717e929193a42190fd623a080c647aa6/tiktoken-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:13220f12c9e82e399377e768640ddfe28bea962739cc3a869cad98f42c419a89", size = 1061648, upload-time = "2025-08-08T23:58:00.753Z" }, + { url = "https://files.pythonhosted.org/packages/50/89/faa668066b2a4640534ef5797c09ecd0a48b43367502129b217339dfaa97/tiktoken-0.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f2db627f5c74477c0404b4089fd8a28ae22fa982a6f7d9c7d4c305c375218f3", size = 1000950, upload-time = "2025-08-08T23:58:01.855Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/5f950528b54cb3025af4bc3522c23dbfb691afe8ffb292aa1e8dc2e6bddf/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2302772f035dceb2bcf8e55a735e4604a0b51a6dd50f38218ff664d46ec43807", size = 1130777, upload-time = "2025-08-08T23:58:03.256Z" }, + { url = "https://files.pythonhosted.org/packages/27/a4/e82ddf0773835ba24536ac8c0dce561e697698ec020a93212a1e041d39b4/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b977989afe44c94bcc50db1f76971bb26dca44218bd203ba95925ef56f8e7a", size = 1185692, upload-time = "2025-08-08T23:58:04.476Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c2/06361e41d176e62797ae65fa678111cdd30553321cf4d83e7b84107ea95f/tiktoken-0.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:669a1aa1ad6ebf1b3c26b45deb346f345da7680f845b5ea700bba45c20dea24c", size = 1246518, upload-time = "2025-08-08T23:58:06.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/ad/ca37e15c46741ebb3904d562d03194e845539a08f7751a6df0f391757312/tiktoken-0.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:e363f33c720a055586f730c00e330df4c7ea0024bf1c83a8a9a9dbc054c4f304", size = 884702, upload-time = "2025-08-08T23:58:07.534Z" }, ] [[package]] @@ -2756,26 +3005,26 @@ wheels = [ [[package]] name = "tornado" -version = "6.5.1" +version = "6.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/89/c72771c81d25d53fe33e3dca61c233b665b2780f21820ba6fd2c6793c12b/tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c", size = 509934, upload-time = "2025-05-22T18:15:38.788Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/89/f4532dee6843c9e0ebc4e28d4be04c67f54f60813e4bf73d595fe7567452/tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7", size = 441948, upload-time = "2025-05-22T18:15:20.862Z" }, - { url = "https://files.pythonhosted.org/packages/15/9a/557406b62cffa395d18772e0cdcf03bed2fff03b374677348eef9f6a3792/tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6", size = 440112, upload-time = "2025-05-22T18:15:22.591Z" }, - { url = "https://files.pythonhosted.org/packages/55/82/7721b7319013a3cf881f4dffa4f60ceff07b31b394e459984e7a36dc99ec/tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888", size = 443672, upload-time = "2025-05-22T18:15:24.027Z" }, - 
{ url = "https://files.pythonhosted.org/packages/7d/42/d11c4376e7d101171b94e03cef0cbce43e823ed6567ceda571f54cf6e3ce/tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331", size = 443019, upload-time = "2025-05-22T18:15:25.735Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f7/0c48ba992d875521ac761e6e04b0a1750f8150ae42ea26df1852d6a98942/tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e", size = 443252, upload-time = "2025-05-22T18:15:27.499Z" }, - { url = "https://files.pythonhosted.org/packages/89/46/d8d7413d11987e316df4ad42e16023cd62666a3c0dfa1518ffa30b8df06c/tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401", size = 443930, upload-time = "2025-05-22T18:15:29.299Z" }, - { url = "https://files.pythonhosted.org/packages/78/b2/f8049221c96a06df89bed68260e8ca94beca5ea532ffc63b1175ad31f9cc/tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692", size = 443351, upload-time = "2025-05-22T18:15:31.038Z" }, - { url = "https://files.pythonhosted.org/packages/76/ff/6a0079e65b326cc222a54720a748e04a4db246870c4da54ece4577bfa702/tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a", size = 443328, upload-time = "2025-05-22T18:15:32.426Z" }, - { url = "https://files.pythonhosted.org/packages/49/18/e3f902a1d21f14035b5bc6246a8c0f51e0eef562ace3a2cea403c1fb7021/tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365", size = 444396, upload-time = "2025-05-22T18:15:34.205Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/09/6526e32bf1049ee7de3bebba81572673b19a2a8541f795d887e92af1a8bc/tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b", size = 444840, upload-time = "2025-05-22T18:15:36.1Z" }, - { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596, upload-time = "2025-05-22T18:15:37.433Z" }, + { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" }, + { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, + { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, + { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, ] [[package]] name = "tox" -version = "4.27.0" +version = "4.28.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, @@ -2790,24 +3039,25 @@ dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/b7/19c01717747076f63c54d871ada081cd711a7c9a7572f2225675c3858b94/tox-4.27.0.tar.gz", hash = "sha256:b97d5ecc0c0d5755bcc5348387fef793e1bfa68eb33746412f4c60881d7f5f57", size = 198351, upload-time = "2025-06-17T15:17:50.585Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/01/321c98e3cc584fd101d869c85be2a8236a41a84842bc6af5c078b10c2126/tox-4.28.4.tar.gz", hash = "sha256:b5b14c6307bd8994ff1eba5074275826620325ee1a4f61316959d562bfd70b9d", size = 199692, upload-time = "2025-07-31T21:20:26.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/3a/30889167f41ecaffb957ec4409e1cbc1d5d558a5bbbdfb734a5b9911930f/tox-4.27.0-py3-none-any.whl", hash = "sha256:2b8a7fb986b82aa2c830c0615082a490d134e0626dbc9189986da46a313c4f20", size = 173441, upload-time = "2025-06-17T15:17:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/564a33093e41a585e2e997220986182c037bc998abf03a0eb4a7a67c4eff/tox-4.28.4-py3-none-any.whl", hash = "sha256:8d4ad9ee916ebbb59272bb045e154a10fa12e3bbdcf94cc5185cbdaf9b241f99", size = 174058, upload-time = "2025-07-31T21:20:24.836Z" }, ] [[package]] name = "tox-uv" -version = "1.26.0" +version = "1.28.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "tox" }, { name = 
"typing-extensions", marker = "python_full_version < '3.10'" }, { name = "uv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/da/37790b4a176f05b0ec7a699f54979078fc726f743640aa5c10c551c27edb/tox_uv-1.26.0.tar.gz", hash = "sha256:5045880c467eed58a98f7eaa7fe286b7ef688e2c56f2123d53e275011495c381", size = 21523, upload-time = "2025-05-27T14:51:42.702Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/9a/f4b675ebcbd623854129891e87045f80c1d8e91b2957496f1fe6e463f291/tox_uv-1.28.0.tar.gz", hash = "sha256:a06ff909f73232b2b7965de19090d887b12b44e44eb0843b2c07266d2957ade2", size = 23265, upload-time = "2025-08-14T17:53:07.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/b8/04c5cb83da072a3f96d357d68a551f5e97e162573c2011a09437df995811/tox_uv-1.26.0-py3-none-any.whl", hash = "sha256:894b2e7274fd6131c3bd1012813edc858753cad67727050c21cd973a08e691c8", size = 16562, upload-time = "2025-05-27T14:51:40.803Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ac/b32555d190c4440b8d2779d4a19439e5fbd5a3950f7e5a17ead7c7d30cad/tox_uv-1.28.0-py3-none-any.whl", hash = "sha256:3fbe13fa6eb6961df5512e63fc4a5cc0c8d264872674ee09164649f441839053", size = 17225, upload-time = "2025-08-14T17:53:06.299Z" }, ] [[package]] @@ -2833,57 +3083,57 @@ wheels = [ [[package]] name = "types-docker" -version = "7.1.0.20250523" +version = "7.1.0.20250809" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-requests" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/d6/7acc13bcd8ee773dbb0dac967091d8afe3ffae19a51c153c9771a8becd8d/types_docker-7.1.0.20250523.tar.gz", hash = "sha256:fd7a2dbc75cbf58170f2ae9ac31d6e810ead646a5b28c016698edb293d43d60d", size = 30995, upload-time = "2025-05-23T03:05:45.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/3c/d8f29819bc138a57d9230e7f099c4bcf8cd2c886270c3cf205f1905f63ff/types_docker-7.1.0.20250809.tar.gz", hash = 
"sha256:48f360e5b2d2d9f333f0b45c557097b2c06c48c450874130c49cf5b40a955497", size = 30961, upload-time = "2025-08-09T03:17:47.346Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/f0/188faad66fff121c0391a91f7fccda496fdae135e407712dfcca17720749/types_docker-7.1.0.20250523-py3-none-any.whl", hash = "sha256:ce6276bec00be41d1b00f87e31d0d39dcd0811a44c18f06b0046def3ee22b96e", size = 45816, upload-time = "2025-05-23T03:05:43.553Z" }, + { url = "https://files.pythonhosted.org/packages/69/df/85441ba1e35b9cdc4d700f931e3ec26964ef84904e0f493453b410c79136/types_docker-7.1.0.20250809-py3-none-any.whl", hash = "sha256:00a7fd764894d3f25d2c4c3ac13928398dcc2389af52335893ae2ceb09cfb712", size = 45808, upload-time = "2025-08-09T03:17:46.47Z" }, ] [[package]] name = "types-pytz" -version = "2025.2.0.20250516" +version = "2025.2.0.20250809" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/72/b0e711fd90409f5a76c75349055d3eb19992c110f0d2d6aabbd6cfbc14bf/types_pytz-2025.2.0.20250516.tar.gz", hash = "sha256:e1216306f8c0d5da6dafd6492e72eb080c9a166171fa80dd7a1990fd8be7a7b3", size = 10940, upload-time = "2025-05-16T03:07:01.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876, upload-time = "2025-08-09T03:14:17.453Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ba/e205cd11c1c7183b23c97e4bcd1de7bc0633e2e867601c32ecfc6ad42675/types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451", size = 10136, upload-time = "2025-05-16T03:07:01.075Z" }, + { url = "https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = 
"sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095, upload-time = "2025-08-09T03:14:16.674Z" }, ] [[package]] name = "types-requests" -version = "2.32.4.20250611" +version = "2.32.4.20250809" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118, upload-time = "2025-06-11T03:11:41.272Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643, upload-time = "2025-06-11T03:11:40.186Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" }, ] [[package]] name = "types-tqdm" -version = "4.67.0.20250516" +version = "4.67.0.20250809" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/07/eb40de2dc2ff2d1a53180330981b1bdb42313ab4e1b11195d8d64c878b3c/types_tqdm-4.67.0.20250516.tar.gz", hash = "sha256:230ccab8a332d34f193fc007eb132a6ef54b4512452e718bf21ae0a7caeb5a6b", size = 
17232, upload-time = "2025-05-16T03:09:52.091Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d0/cf498fc630d9fdaf2428b93e60b0e67b08008fec22b78716b8323cf644dc/types_tqdm-4.67.0.20250809.tar.gz", hash = "sha256:02bf7ab91256080b9c4c63f9f11b519c27baaf52718e5fdab9e9606da168d500", size = 17200, upload-time = "2025-08-09T03:17:43.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/92/df621429f098fc573a63a8ba348e731c3051b397df0cff278f8887f28d24/types_tqdm-4.67.0.20250516-py3-none-any.whl", hash = "sha256:1dd9b2c65273f2342f37e5179bc6982df86b6669b3376efc12aef0a29e35d36d", size = 24032, upload-time = "2025-05-16T03:09:51.226Z" }, + { url = "https://files.pythonhosted.org/packages/3f/13/3ff0781445d7c12730befce0fddbbc7a76e56eb0e7029446f2853238360a/types_tqdm-4.67.0.20250809-py3-none-any.whl", hash = "sha256:1a73053b31fcabf3c1f3e2a9d5ecdba0f301bde47a418cd0e0bdf774827c5c57", size = 24020, upload-time = "2025-08-09T03:17:42.453Z" }, ] [[package]] name = "typing-extensions" -version = "4.14.0" +version = "4.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = 
"2025-06-02T14:52:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, ] [[package]] @@ -2926,41 +3176,43 @@ wheels = [ [[package]] name = "uv" -version = "0.7.13" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/08/1bcafa9077965de397d927f291827a77a915d75567b42c3ad6bb6a2e0bcd/uv-0.7.13.tar.gz", hash = "sha256:05f3c03c4ea55d294f3da725b6c2c2ff544754c18552da7594def4ec3889dcfb", size = 3308772, upload-time = "2025-06-12T22:23:10.377Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/4e/cdf97c831be960e13c7db28b6ba226e5bdbfee9a38f6099687c7a395ec52/uv-0.7.13-py3-none-linux_armv6l.whl", hash = "sha256:59915aec9fd2b845708a76ddc6c0639cfc99b6e2811854ea2425ee7552aff0e9", size = 17073615, upload-time = "2025-06-12T20:58:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/27217e8a7a457bc9c068d99f2d860706649130755fa377306d75a326ce0b/uv-0.7.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9c457a84cfbe2019ba301e14edd3e1c950472abd0b87fc77622ab3fc475ba012", size = 17099887, upload-time = "2025-06-12T20:58:50.272Z" }, - { url = "https://files.pythonhosted.org/packages/46/c7/1d7ec2211732512ae43d7176242fea3eea1915c83565953014bbafcb6be2/uv-0.7.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4f828174e15a557d3bc0f809de76135c3b66bcbf524657f8ced9d22fc978b89c", size = 15800953, upload-time = "2025-06-12T20:58:52.897Z" }, - { url = "https://files.pythonhosted.org/packages/d8/5b/81ea6ec50890a064b37d8f8dc097901768f73c747d965ffd96f1ebdfacea/uv-0.7.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:88fcf2bfbb53309531a850af50d2ea75874099b19d4159625d0b4f88c53494b9", size = 16355391, 
upload-time = "2025-06-12T20:58:55.146Z" }, - { url = "https://files.pythonhosted.org/packages/64/24/92a30049a74bf17c9c4ffbf36462c5ff593617c2d0b78efb3c9d55293746/uv-0.7.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:721b058064150fc1c6d88e277af093d1b4f8bb7a59546fe9969d9ff7dbe3f6fd", size = 16819352, upload-time = "2025-06-12T20:58:57.299Z" }, - { url = "https://files.pythonhosted.org/packages/74/fe/8b4de3addc375ba00bd1a515a79aaccbb3a600bc66c03e5fd159d6928066/uv-0.7.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f28e70baadfebe71dcc2d9505059b988d75e903fc62258b102eb87dc4b6994a3", size = 17518852, upload-time = "2025-06-12T20:58:59.538Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/e9c14c6aba0316da7fe30b0dac4f8f6d1155d0422dcff1138b85f4eb4c08/uv-0.7.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9d2952a1e74c7027347c74cee1cb2be09121a5290db38498b8b17ff585f73748", size = 18405034, upload-time = "2025-06-12T20:59:01.747Z" }, - { url = "https://files.pythonhosted.org/packages/9d/62/a2f4147fa2714ce765104e2984abcdaa0605725b10ca70bee7de4a1ba88c/uv-0.7.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a51006c7574e819308d92a3452b22d5bd45ef8593a4983b5856aa7cb8220885f", size = 18120055, upload-time = "2025-06-12T20:59:03.997Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b2/f4381c1aa4d3d13ff36359e4176cd34d1da1548ba2a6c763a953b282ede0/uv-0.7.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33837aca7bdf02d47554d5d44f9e71756ee17c97073b07b4afead25309855bc7", size = 18283737, upload-time = "2025-06-12T20:59:06.437Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ef/f2e96cec5e4cf65d7fde89b5dcf9540ddacf42e8e39de2fa0332614e55a8/uv-0.7.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5786a29e286f2cc3cbda13a357fd9a4dd5bf1d7448a9d3d842b26b4f784a3a86", size = 17755308, upload-time = 
"2025-06-12T20:59:08.837Z" }, - { url = "https://files.pythonhosted.org/packages/34/6d/d7a1af8ece6d5cac5287d00e15b9650eb9d3203606add4cd035009d52de6/uv-0.7.13-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1afdbfcabc3425b383141ba42d413841c0a48b9ee0f4da65459313275e3cea84", size = 16611463, upload-time = "2025-06-12T20:59:10.971Z" }, - { url = "https://files.pythonhosted.org/packages/b4/e8/27294e3067295db8f54dbe8a1f64b6e3000adc1cba29f953c440bc184a5d/uv-0.7.13-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:866cad0d04a7de1aaa3c5cbef203f9d3feef9655972dcccc3283d60122db743b", size = 16759459, upload-time = "2025-06-12T22:22:44.278Z" }, - { url = "https://files.pythonhosted.org/packages/94/6a/36f055eb1b9a44d60eed9a5aa93cf0f23660a19ab07a5ef085331dd9fc0a/uv-0.7.13-py3-none-musllinux_1_1_i686.whl", hash = "sha256:527a12d0c2f4d15f72b275b6f4561ae92af76dd59b4624796fddd45867f13c33", size = 17108780, upload-time = "2025-06-12T22:22:48.412Z" }, - { url = "https://files.pythonhosted.org/packages/11/c1/0f09c0de0896d04b4bb81bdd7833643f055e8a5c2c04f8a2ddf3a74453d8/uv-0.7.13-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:4efa555b217e15767f0691a51d435f7bb2b0bf473fdfd59f173aeda8a93b8d17", size = 17900498, upload-time = "2025-06-12T22:22:50.93Z" }, - { url = "https://files.pythonhosted.org/packages/ce/6f/ee435b4ec3903617b5f592c0077ef0c1e22c41e2ab872be2134b223aabb2/uv-0.7.13-py3-none-win32.whl", hash = "sha256:b1af81e57d098b21b28f42ec756f0e26dce2341d59ba4e4f11759bc3ca2c0a99", size = 17329841, upload-time = "2025-06-12T22:22:57.517Z" }, - { url = "https://files.pythonhosted.org/packages/af/05/c16e2b9369d440e3c85439257bd679c3a92bdd248015238a8848941828f6/uv-0.7.13-py3-none-win_amd64.whl", hash = "sha256:8c0c29a2089ff9011d6c3abccd272f3ee6d0e166dae9e5232099fd83d26104d9", size = 18820166, upload-time = "2025-06-12T22:23:05.224Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ac/68fd18d5190515f9ddb31cc2f14e21d1b38bee721ece2d43c42e13646fa3/uv-0.7.13-py3-none-win_arm64.whl", 
hash = "sha256:e077dcac19e564cae8b4223b7807c2f617a59938f8142ca77fc6348ae9c6d0aa", size = 17456260, upload-time = "2025-06-12T22:23:08.227Z" }, +version = "0.8.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/c1/765112567045a2219979d1a7038e4a2afbddd0637446556b089e77252528/uv-0.8.11.tar.gz", hash = "sha256:d98105244b895c6026e9f3d86f200b70039d39a5f4866022fae664ed935530f3", size = 3504312, upload-time = "2025-08-14T19:48:18.071Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/2f/6703896c45d29b44e5954bb283f00616387cef7ae80188226dac87aff93d/uv-0.8.11-py3-none-linux_armv6l.whl", hash = "sha256:1be7cbc874980dc3e5e0c40fdb3787013a35cce64485f7685fc4b0ee550f7c0c", size = 18497046, upload-time = "2025-08-14T19:47:28.18Z" }, + { url = "https://files.pythonhosted.org/packages/61/fe/3ae518ea5a6c2e4fd3d0174486c841bd85e676b3971d9553445ab57319d9/uv-0.8.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:84c888cc7b3310aada6058ce964d9b48d4f7801add6f1236548adeb262c637bf", size = 18573000, upload-time = "2025-08-14T19:47:32.156Z" }, + { url = "https://files.pythonhosted.org/packages/00/21/6a1cd01103aec916fdf2daa034e3a179a6b835b25db89f4f5e43117ac68c/uv-0.8.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3e46395c7f2c7e52bf63f29f3fc1c6b357b011285d1df37d8af9c6f6f7cad36f", size = 17205164, upload-time = "2025-08-14T19:47:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b2/8a9e00d6e5c41a231f59f75c15b04626f7d4561364475962894a31b01fee/uv-0.8.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:d9d35783ac8600cd8e95e9afd007aa281edf3125803c570a4b3246138e2a304d", size = 17822163, upload-time = "2025-08-14T19:47:37.111Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/230f1ed3cbeae61d10ac8acc3d63b38a81c728161e7671fe3516aec72c76/uv-0.8.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ce267b13f498cebb9690c06461b727718bd11624679ddebb0a3998efe6b80ad7", size = 18152038, upload-time = "2025-08-14T19:47:39.951Z" }, + { url = "https://files.pythonhosted.org/packages/95/be/7fd436adedd79c9afad14722135029010a972e17b05312795a976bc08854/uv-0.8.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c03aec1ad898642ae427b763cf5e5f90a678b91254f733ae08d01d15acd3672b", size = 18991855, upload-time = "2025-08-14T19:47:42.664Z" }, + { url = "https://files.pythonhosted.org/packages/80/4e/2cca1be92fc3cdfddb5f2fa8d5650098948f357774cbe51810aaa5968da0/uv-0.8.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83aa9c8b0085949542674301268e2b7b541f1108dc95664dedf50fffd1578f97", size = 20248085, upload-time = "2025-08-14T19:47:45.489Z" }, + { url = "https://files.pythonhosted.org/packages/a5/9d/c4a5bbccfa45d8573d22da0d753329e572e72cd70796720dc0bc5c74e5c5/uv-0.8.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e9506b3febbce3559290cb10cd1c84dbed32bc4f4b1062bc2fe4f093aa42aea", size = 19961250, upload-time = "2025-08-14T19:47:47.963Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f1/c1f9e59110fce261ee67cff854b4f95cae39a523d2a076c7566a704ebbe6/uv-0.8.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba7bb038f0a263accefde1db68ecba7a756c85e6bcc25af161acef2711d6da19", size = 19314178, upload-time = "2025-08-14T19:47:50.469Z" }, + { url = "https://files.pythonhosted.org/packages/fc/47/c398c3a9657a6f8c3a7b1938ae0b7061c4087e1fbb00f83a7a4f79005752/uv-0.8.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36eb184758f18347547045a3aa7cc87c98a75c773e437c8a85878eb004a31c2e", size = 19314121, upload-time = "2025-08-14T19:47:54.17Z" }, + { url = "https://files.pythonhosted.org/packages/69/04/7ff94b68c33b93e89ec9920724b2a6d3992051584afd3410bf2604d2b93c/uv-0.8.11-py3-none-manylinux_2_28_aarch64.whl", hash = 
"sha256:0a7fcbe71cc5402b7c3d4c381f9b970a455d8ccc2a43ee2ce5ac2b617ec0534c", size = 18105431, upload-time = "2025-08-14T19:47:56.844Z" }, + { url = "https://files.pythonhosted.org/packages/09/5a/aee6041cd0c9ab1c56da61ba1e9ac30b4ea7c1c85471e19cb0cc1e415c0a/uv-0.8.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0da2c794dead209e660cb7df143ea9756c118ffa5874859e8a28a79101b5c760", size = 18984052, upload-time = "2025-08-14T19:47:59.927Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7b9926b676a3807312bfb91662813305b305c5218a05a9b651763b28267e/uv-0.8.11-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:0a95dc944d62db4ca282f7415c2d3c0fa3ead9e245a47d845515f5ddbd5a80ef", size = 18109344, upload-time = "2025-08-14T19:48:02.607Z" }, + { url = "https://files.pythonhosted.org/packages/82/19/1e90e45fd84c4f5512dc9c8ad0ac3a4792677725047d3e7299f9dae41406/uv-0.8.11-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0cd14f319e18a7b278238f0d87b18180282ec4d44d023f8b3ed2c8c091a14277", size = 18493945, upload-time = "2025-08-14T19:48:05.112Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/e6b784ede573d3f1ba6fafe70dd317b4543146a6c2ca88a5f56923518552/uv-0.8.11-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:261d19395a211f980d1ebc861356cf73ba23ceece2392c0b36ade38f89fd16a6", size = 19398023, upload-time = "2025-08-14T19:48:07.993Z" }, + { url = "https://files.pythonhosted.org/packages/65/5f/fd61ebec95bb5854c860d5268bc8ecbbca881465340f1e86302cacdd8234/uv-0.8.11-py3-none-win32.whl", hash = "sha256:0b922061f7b5915f224df23a849b6e1bfcace2e6b9fc0ee128868447873edb22", size = 18308608, upload-time = "2025-08-14T19:48:10.847Z" }, + { url = "https://files.pythonhosted.org/packages/bb/57/84358ea67cee7ec029ed0d51e801a64c5929b7d647ae31cd5e5aea0c6f61/uv-0.8.11-py3-none-win_amd64.whl", hash = "sha256:fe01737f3ddd533903f31236219c29e09063541f17a060403acc51906ce0cfe8", size = 20214609, upload-time = "2025-08-14T19:48:13.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/72/069a75703693d3297d95657957ea00d2f035896066f00a5692fbdce76d36/uv-0.8.11-py3-none-win_arm64.whl", hash = "sha256:cf3454d3407a5cac0d661b6033e3197643d0a6b5bb0e00869f6877ff7af907c9", size = 18878482, upload-time = "2025-08-14T19:48:15.743Z" }, ] [[package]] name = "virtualenv" -version = "20.31.2" +version = "20.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, ] [[package]] From 5ad6e956f5be3cd17874551dbf5504ba4307aec1 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Tue, 19 Aug 2025 08:07:02 +0000 Subject: [PATCH 07/20] saving progress till now; implemented an agent for filtering 
commits + start storing patch info --- README.md | 37 +- pyproject.toml | 13 + scratch/scripts/collect_commits.py | 15 +- scratch/scripts/filter_commits.py | 30 +- src/datasmith/__init__.py | 4 + src/datasmith/agents/__init__.py | 0 src/datasmith/agents/config.py | 20 + src/datasmith/agents/perf_judge.py | 239 ++ src/datasmith/docker/context_registry.py | 39 +- .../execution/collect_commits_offline.py | 165 +- src/datasmith/execution/utils.py | 120 +- uv.lock | 1944 ++++++++++++++++- 12 files changed, 2568 insertions(+), 58 deletions(-) create mode 100644 src/datasmith/agents/__init__.py create mode 100644 src/datasmith/agents/config.py create mode 100644 src/datasmith/agents/perf_judge.py diff --git a/README.md b/README.md index e324c13..e9663ab 100644 --- a/README.md +++ b/README.md @@ -86,7 +86,7 @@ Each of these repositories has a publicly accessible perpetually updating dashbo As all these dashboards have the same structure, we developed an ethical scraper that can scrape these dashboards and download the performance data in a structured format. The scraper is invoked using `scripts/download_dataset.py` and can be run as follows: ```bash -$ python scripts/download_dataset.py \ +$ python scratch/scripts/download_dataset.py \ --force \ --dashboards scratch/artifacts/raw/online_dashboards.jsonl # machines: 100%|██████████████████████████████████████| 7/7 [00:56<00:00, 8.05s/it] @@ -109,7 +109,7 @@ To detect performance improving commits, we provide two methods: Either method can be used by passing `--method 'asv'` or `--method 'rbf'` to the script. The `rupture` method is enabled by default as we might not have mean + standard deviation data for all commits in the dataset (that is required by `asv.step_detect`). 
```bash -$ python scripts/detect_breakpoints.py \ +$ python scratch/scripts/detect_breakpoints.py \ --build-reports \ --method rbf \ --compute-coverage \ @@ -144,7 +144,7 @@ To run the script, you need to have a GitHub token with `repo` and `read:org` pe The scraper can be run using the following command: ```bash -$ python scripts/scrape_repositories.py \ +$ python scratch/scripts/scrape_repositories.py \ --outfile scratch/artifacts/processed/repos_discovered.csv \ --min-stars 500 \ --filtered-outfile scratch/artifacts/processed/repos_valid.csv @@ -159,14 +159,14 @@ The `scratch/artifacts/processed/repos_valid.csv` file contains a subset of the Given the list of repositories, we find the subset of commits that have already been closed and merged into the main branch (the top 5000 PRs, sorted by popularity). We use the `collect_commits.py` script to do this. The `filter_commits.py` script then filters out those commits that primarily modified the benchmarking files (e.g. `asv.conf.json`) or were not relevant to the benchmarks (e.g. documentation changes). The script also limits the number of repositories to a maximum of 350 to ensure we don't burden the GitHub API with too many requests. 
The scripts can be run as follows: ```bash -$ python scripts/collect_commits.py \ - --dashboards scratch/artifacts/raw/repos_valid.csv \ - --outfile scratch/artifacts/raw/commits_all.jsonl \ +$ python scratch/scripts/collect_commits.py \ + --dashboards scratch/artifacts/raw/repos_valid_sm.csv \ + --outfile scratch/artifacts/raw/commits_all_sm.jsonl \ --max-pages 50 -$ python scripts/filter_commits.py \ - --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid.csv \ - --merged-commits-pth scratch/artifacts/raw/commits_all.jsonl \ - --output-pth scratch/artifacts/raw/commits_filtered.jsonl \ +$ python scratch/scripts/filter_commits.py \ + --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid_sm.csv \ + --merged-commits-pth scratch/artifacts/raw/commits_all_sm.jsonl \ + --output-pth scratch/artifacts/raw/commits_filtered_sm.jsonl \ --max-repos 350 \ --threads 8 \ --procs 8 @@ -187,13 +187,12 @@ The `dependency_recommendations.json` file is a dictionary that contains recomme (sudo) $ export OMP_NUM_THREADS=1 (sudo) $ sudo python -m pyperf system tune # in userspace: -$ python scripts/benchmark_commits.py \ - --filtered-commits scratch/artifacts/raw/commits_filtered.jsonl \ - --dep-recs scratch/artifacts/raw/dependency_recommendations.json \ +$ python scratch/scripts/benchmark_commits.py \ + --filtered-commits scratch/artifacts/raw/commits_filtered_sm.jsonl \ --max-concurrency 30 \ --num-cores 2 \ --asv-args "--interleave-rounds --append-samples -a rounds=2 -a repeat=2" \ - --output-dir scratch/artifacts/benchmark_results/ + --output-dir scratch/artifacts/benchmark_results_sm/ ``` Generally, each benchmark takes ~2 minutes to run, so benchmarking 70,000 commits on 16 dedicated 4-core machines takes around 6 days. The script will create a directory called `scratch/artifacts/benchmark_results/` that contains the results of the benchmarks for each commit. The results are stored in a structured format that can be easily processed later. 
@@ -203,7 +202,7 @@ Generally, each benchmark takes ~2 minutes to run, so benchmarking 70,000 commit This step aggregates the benchmark results and generates the `*.fc.pkl` file. The `detect_breakpoints.py` script can then be used unchanged to detect performance improving commits. The script can be run as follows: ```bash -$ python scripts/collate_benchmark_results.py \ +$ python scratch/scripts/collate_benchmark_results.py \ --results-dir scratch/artifacts/benchmark_results/results \ --output-dir scratch/artifacts/benchmark_results/published/ \ --commit-metadata scratch/artifacts/raw/commits_filtered.jsonl \ @@ -213,11 +212,15 @@ $ python scripts/collate_benchmark_results.py \ # summaries: 100%|████████████████████████████████████████| 115/115 [00:00<00:00, 234.43it/s] # Saved 53,705 benchmark rows and 35,765 summary rows -> /home/???/formulacode/datasmith/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl # Benchmark results aggregated and saved to /home/???/formulacode/datasmith/benchmark_results/published/html. -$ python scripts/detect_breakpoints.py \ +$ python scratch/scripts/detect_breakpoints.py \ --build-reports \ --method rbf \ --compute-coverage \ --dataset scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl + +$ python scratch/scripts/validate_containers.py \ + --dashboard scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/dashboard.fc.pkl \ + --output-dir scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/containers/ # ... ``` @@ -228,7 +231,7 @@ The generated `breakpoints.fc.pkl` file contains all the information about the d How closely do our benchmarked metrics match the original performance improvements? We can answer this question by running the `scripts/replication_experiment.py` script. 
This script takes in two `breakpoints.fc.pkl` files, ensures that they point to the same repository, finds the common set of commits, and then computes the correlation between the performance improvements in the two datasets as well as some basic statistics and plots about the performance improvements. The script can be run as follows: ```bash -$ python scripts/replication_experiment.py \ +$ python scratch/scripts/replication_experiment.py \ --dataset1 scratch/artifacts/benchmark_results/published/html/scikit-learn_scikit-learn/breakpoints.fc.pkl \ --dataset2 scratch/artifacts/raw/downloads/sklearn/breakpoints.fc.pkl \ --output-dir scratch/artifacts/replication/ diff --git a/pyproject.toml b/pyproject.toml index 2b7834a..4e6984d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ classifiers = [ dependencies = [ "asv", "docker", + "dspy>=2.6.27", "gitpython", "numpy", "pandas", @@ -134,3 +135,15 @@ skip_empty = true [tool.coverage.run] branch = true source = ["src"] + +[[tool.mypy.overrides]] +module = "dspy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "datasmith.agents.perf_judge" +disable_error_code = ["no-any-unimported"] + +[[tool.mypy.overrides]] +module = "datasmith.execution.utils" +disallow_untyped_defs = false diff --git a/scratch/scripts/collect_commits.py b/scratch/scripts/collect_commits.py index 741dc48..99d6259 100644 --- a/scratch/scripts/collect_commits.py +++ b/scratch/scripts/collect_commits.py @@ -2,11 +2,14 @@ import pandas as pd -# from datasmith.execution.collect_commits import search_commits -from datasmith.execution.collect_commits_offline import search_commits +from datasmith.execution.collect_commits_offline import find_perf_commits, find_tagged_releases from datasmith.logging_config import configure_logging -# Configure logging for the script +# from datasmith.execution.collect_commits import search_commits +# logger = configure_logging( +# level=logging.DEBUG, +# stream=open(__file__ + ".log", 
"a"), +# ) logger = configure_logging() @@ -46,12 +49,15 @@ def parse_args() -> argparse.Namespace: all_commits = [] for repo_name, url in zip(repo_names, urls): logger.info("Collecting commits for %s (repo_name: %s)", url, repo_name) - commits = search_commits( + perf_commits = find_perf_commits( repo_name=repo_name, query=args.query, max_pages=args.max_pages, per_page=args.per_page, ) + tagged_commits = find_tagged_releases(repo_name=repo_name) + # parent_commits = find_parent_commits(repo_name=repo_name, commits=perf_commits + tagged_commits) + commits = list(set(perf_commits + tagged_commits)) for i, commit in enumerate(commits, 1): commit_id = f"{repo_name}_{i}" all_commits.append({ @@ -66,3 +72,4 @@ def parse_args() -> argparse.Namespace: with open(args.outfile, "w", encoding="utf-8") as f: for commit in all_commits: f.write(f"{commit}\n") + logger.info("Collected %d commits from %d repositories", len(all_commits), len(urls)) diff --git a/scratch/scripts/filter_commits.py b/scratch/scripts/filter_commits.py index 443916a..11a8d5d 100644 --- a/scratch/scripts/filter_commits.py +++ b/scratch/scripts/filter_commits.py @@ -34,7 +34,7 @@ def parse_args() -> argparse.Namespace: return p.parse_args() -def _asv_conf_worker(repo_name: str) -> str | None: +def _asv_conf_worker(repo_name: str) -> list[str] | None: """Locate asv.conf.json inside a repo (wrapper for ThreadPool).""" return find_file_in_tree(repo_name, "asv.conf.json") @@ -111,11 +111,6 @@ def main() -> None: commits = commits.dropna(subset=["commit_sha"]) all_repo_names = set(commits["repo_name"]) - import IPython - - IPython.embed( - header="I need to figure out why the filter function is taking out commits that seem to have gt info available" - ) # For debugging purposes, remove in production # download all repos to a temp dir with tempfile.TemporaryDirectory(prefix="gh-repos-") as td: @@ -131,7 +126,10 @@ def main() -> None: # multi_options=["--filter=tree:0"], multi_options=["--filter=blob:none"], 
quiet=True, + allow_unsafe_options=True, + allow_unsafe_protocols=True, ) + logger.debug("Cloned repo %s to %s", repo_name, path) all_repos[repo_name] = repo commit_info_args: list[tuple[Repo, str]] = [] @@ -148,16 +146,26 @@ def main() -> None: ) ) - commit_meta = pd.json_normalize(commits.pop("commit_info")) - commits = pd.concat([commits, commit_meta], axis=1) - commits = commits.dropna(subset=["asv_conf_path", "sha", "date", "message"]) - commits = commits[commits["files_changed"].apply(has_core_file)].reset_index(drop=True) + commit_meta = pd.json_normalize(commits.pop("commit_info")) # pyright: ignore[reportArgumentType] + commits_merged = commits.merge( + commit_meta, + how="left", + left_on=["commit_sha"], + right_on=["sha"], + ) + commits_merged = commits_merged.dropna(subset=["asv_conf_path", "sha", "date", "message"]) + assert len(commits_merged) == len(commits), "Merge should not change the number of rows" # noqa: S101 + commits_merged = commits_merged[commits_merged["files_changed"].apply(has_core_file)].reset_index(drop=True) + + for k, repo in all_repos.items(): + repo.close() + logger.debug("Closed repo %s", k) out_path = Path(args.output_pth) if not out_path.parent.exists(): out_path.parent.mkdir(parents=True, exist_ok=True) # commits.to_csv(out_path, index=False) - commits.to_json(out_path, orient="records", lines=True, index=False) + commits_merged.to_json(out_path, orient="records", lines=True, index=False) logger.info("✔ Wrote %s rows → %s", len(commits), out_path) diff --git a/src/datasmith/__init__.py b/src/datasmith/__init__.py index 0400270..f3ee4dd 100644 --- a/src/datasmith/__init__.py +++ b/src/datasmith/__init__.py @@ -1,5 +1,6 @@ import os +from datasmith.agents.config import configure_agent_backends from datasmith.logging_config import configure_logging # Configure logging with the centralized configuration @@ -15,5 +16,8 @@ def setup_environment() -> None: else: logger.warning("No tokens.env file found. 
Skipping environment variable setup.") + # Initialize agent backends + configure_agent_backends() + setup_environment() diff --git a/src/datasmith/agents/__init__.py b/src/datasmith/agents/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/datasmith/agents/config.py b/src/datasmith/agents/config.py new file mode 100644 index 0000000..8eafe18 --- /dev/null +++ b/src/datasmith/agents/config.py @@ -0,0 +1,20 @@ +import logging +import os + +import dspy # type: ignore[import-untyped] + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def configure_agent_backends() -> None: + model = os.getenv("DSPY_MODEL_NAME") + backend_url = os.getenv("DSPY_URL") + api_key = os.getenv("DSPY_API_KEY") + + if not model or not backend_url or not api_key: + logger.warning("Environment variables for DSPY model, URL, or API key are not set.") + return + + lm = dspy.LM(model=model, api_base=backend_url, api_key=api_key, model_type="chat") + dspy.configure(lm=lm) diff --git a/src/datasmith/agents/perf_judge.py b/src/datasmith/agents/perf_judge.py new file mode 100644 index 0000000..b6e35de --- /dev/null +++ b/src/datasmith/agents/perf_judge.py @@ -0,0 +1,239 @@ +import json +import re + +import dspy + +SYSTEM_PROMPT = """ +You are a classifier that decides whether a Git commit message describes a **performance enhancement to the product/runtime** (not merely tests/benchmarks/CI). + +### Count as performance (label **YES**) +- Runtime speedups (e.g., faster loops, vectorization, inlining, caching, avoiding imports, lazy init, non-blocking/async for throughput, lower latency). +- Startup/import time reductions, memory reductions, fewer allocations, less I/O, fewer syscalls. +- Fixing a **speed regression** or a change whose *intent* is “speed up”. +- Behavior changes **explicitly** framed as speeding things up (e.g., “non-blocking requests (speed-up …)”). 
+ +### Do **NOT** count (label **NO**) unless the message clearly states product runtime gets faster: +- Test/bench/ASV/perf-test changes; thresholds; CI; coverage; Makefile/tox/pre-commit; refactors “for tests”. +- Merges, version bumps, housekeeping (“tidy”), or ambiguous “attempt to fix perf tests”. +- Pure UX frequency changes with “no measurable reduction in speed”. + +### Tie-breaker (recall-first) +If ambiguous but plausibly about product/runtime performance, prefer **YES**. Only choose **NO** when it clearly applies solely to tests/infra or non-runtime concerns. + +### Output +Return strict JSON with: +{ + "label": "YES" | "NO", + "reason": "one short sentence", + "confidence": 0-100, + "flags": ["tests-only" | "mentions-speed" | "startup" | "memory" | "non-blocking" | "regression" | "ambiguous" | "merge-or-version" | "infra"] +} +Do not include analysis steps—only the JSON object. +""" + +POSITIVE_RE = re.compile( + r"(perf\b|performance|speed(?:\s*up|(?:\s*-\s*)?up)?|faster|fasterer|" + r"reduce\s+(latency|allocations?|memory|overhead)|latency|throughput|" + r"optimi[sz]e(?:d|s|r)?|optimization|micro[-\s]?opt|vectori[sz]e|" + r"inlin(?:e|ing)|cache|cached|caching|memoiz|non[-\s]?blocking|async|" + r"import\s*time|startup|boot\s*time|hot\s*path|tighten\s*loop|" + r"regression.*(speed|perf)|\[perf\]|^perf:)", + re.I, +) + +NEGATIVE_STRONG_RE = re.compile( + r"(^merge\b|^bump\b|version\b|release\b|chore\b|tidy\b|housekeep|" + r"^revert\b(?!.*regression)|" + r"^tests?:|tests?\b|bench(mark)?\b|asv\b|capsys\b|thresholds?\b|" + r"coverage\b|tox\b|pre-commit\b|makefile\b|ci\b|\bflake8\b|\blint)", + re.I, +) + +TESTS_ONLY_HINT = re.compile( + r"(tests?:|perf[-\s]?tests?|asv|benchmark|capsys|threshold|coverage|tox|pre-commit|pytest|unittest)", re.I +) + + +def heuristic_prior(msg: str) -> tuple[bool | None, int, list[str]]: + m = msg.strip() + pos = bool(POSITIVE_RE.search(m)) + # neg = bool(NEGATIVE_STRONG_RE.search(m)) + tests_only = 
bool(TESTS_ONLY_HINT.search(m)) and not re.search(r"(runtime|import|startup|prod(uction)?)", m, re.I) + + flags = [] + flag_checks = [ + (r"non[-\s]?blocking|async", "non-blocking"), + (r"import\s*time|startup|boot\s*time", "startup"), + (r"memory|alloc", "memory"), + (r"regression", "regression"), + (r"speed|faster|perf|optimi[sz]", "mentions-speed"), + (r"merge|bump|version|release", "merge-or-version"), + ] + for pattern, flag in flag_checks: + if re.search(pattern, m, re.I): + flags.append(flag) + if tests_only: + flags.append("tests-only") + + # Recall-first decision: + if not pos and tests_only: + return False, 65, flags + if pos: + if tests_only: + return True, 55, [*flags, "ambiguous"] + return True, 80, flags + return None, 50, flags + + +class JudgeSignature(dspy.Signature): + """You are a classifier that decides whether a Git commit message describes a **performance enhancement to the product/runtime** (not merely tests/benchmarks/CI). + ### Count as performance (label **YES**) + - Runtime speedups (e.g., faster loops, vectorization, inlining, caching, avoiding imports, lazy init, non-blocking/async for throughput, lower latency). + - Startup/import time reductions, memory reductions, fewer allocations, less I/O, fewer syscalls. + - Fixing a **speed regression** or a change whose *intent* is “speed up”. + - Behavior changes **explicitly** framed as speeding things up (e.g., “non-blocking requests (speed-up …)”). + + ### Do **NOT** count (label **NO**) unless the message clearly states product runtime gets faster: + - Test/bench/ASV/perf-test changes; thresholds; CI; coverage; Makefile/tox/pre-commit; refactors “for tests”. + - Merges, version bumps, housekeeping (“tidy”), or ambiguous “attempt to fix perf tests”. + - Pure UX frequency changes with “no measurable reduction in speed”. + + ### Tie-breaker (recall-first) + If ambiguous but plausibly about product/runtime performance, prefer **YES**. 
Only choose **NO** when it clearly applies solely to tests/infra or non-runtime concerns. + + ### Output + Return strict JSON with: + { + "label": "YES" | "NO", + "reason": "one short sentence", + "confidence": 0-100, + "flags": ["tests-only" | "mentions-speed" | "startup" | "memory" | "non-blocking" | "regression" | "ambiguous" | "merge-or-version" | "infra"] + } + Do not include analysis steps—only the JSON object. + """ + + message = dspy.InputField(desc="A single commit message string.") + debug_json = dspy.OutputField( + desc="JSON dump of the model's internal state, useful for debugging.", + default=None, + ) + + +class LLMJudge(dspy.Module): + def __init__(self) -> None: + super().__init__() + self.predict = dspy.Predict(JudgeSignature) + + def forward(self, message: str) -> dspy.Prediction: + prediction = self.predict(message=message) + out: str = prediction.get("debug_json", None) # pyright: ignore[reportAttributeAccessIssue] + try: + data = json.loads(out) + except Exception: + data = {"label": "YES", "reason": "Permissive fallback", "confidence": 40, "flags": ["ambiguous"]} + + data["label"] = "YES" if str(data.get("label", "YES")).upper().startswith("Y") else "NO" + try: + data["confidence"] = max(0, min(100, int(data.get("confidence", 50)))) + except Exception: + data["confidence"] = 50 + data["flags"] = list(dict.fromkeys(map(str, data.get("flags", [])))) # dedupe + return dspy.Prediction(json=json.dumps(data)) + + +class PerfClassifier(dspy.Module): + """ + Pipeline: + 1) Heuristic prior (recall-first) + 2) LLM judge (JSON) + 3) Combine: favor YES unless strong evidence of tests-only/infra-only + """ + + def __init__(self) -> None: + super().__init__() + self.judge = LLMJudge() + + def forward(self, message: str) -> dspy.Prediction: + prior_label, prior_conf, prior_flags = heuristic_prior(message) + if prior_label is True and prior_conf >= 55: + result = { + "label": "YES", + "reason": "Positive performance cues in message.", + "confidence": 
prior_conf, + "flags": prior_flags, + } + return dspy.Prediction(json=json.dumps(result)) + + # Ask LLM judge + judged = json.loads(self.judge(message=message).json) # pyright: ignore[reportAttributeAccessIssue] + + tests_only = "tests-only" in prior_flags or "tests-only" in judged.get("flags", []) + if judged["label"] == "YES": + return dspy.Prediction(json=json.dumps(judged)) + if prior_label is True and not tests_only: + judged["label"] = "YES" + judged["reason"] = "Recall-first override: positive perf hints." + judged["confidence"] = max(judged["confidence"], 60) + judged["flags"] = list(dict.fromkeys(judged.get("flags", []) + prior_flags + ["ambiguous"])) + return dspy.Prediction(json=json.dumps(judged)) + + # Otherwise respect NO (or explicit tests-only) + if tests_only: + judged["label"] = "NO" + judged["reason"] = "Tests/bench/infra-only message." + judged["confidence"] = max(judged["confidence"], prior_conf, 70) + judged["flags"] = list(dict.fromkeys(judged.get("flags", []) + prior_flags + ["infra"])) + return dspy.Prediction(json=json.dumps(judged)) + + def get_response(self, message: str) -> tuple[bool, str]: + """ + Get the label for a commit message. 
+ """ + json_str = self(message=message).json # pyright: ignore[reportAttributeAccessIssue] + response = json.loads(json_str) + return (response["label"] == "YES", json_str) + + +# if __name__ == "__main__": +# classifier = PerfClassifier() +# examples = [ +# "Speed up tqdm.auto import when not in an IPython notebook", +# ">5% speed increase on empty loops", +# "fix speed regression by inlining", +# "non-blocking requests (speed-up factor ~0.02s/it)", +# "tests: fix asv", +# "tests:perf:capsys upgrades", +# "revert to N_BAR=10 as default, a slightly faster update interval looks better without measurable reduction in iteration speed", +# "performance/optimisation and slight tidy", +# "better ETA for wildly varying iteration speeds", +# ] +# for m in examples: +# print(m) +# print(json.loads(classifier(message=m))) +# print() + +# from dspy.teleprompt import BootstrapFewShot +# train = [ +# dspy.Example(message="Speed up tqdm.auto import when not in an IPython notebook", json='{"label":"YES","reason":"Import-time speedup","confidence":90,"flags":["startup","mentions-speed"]}').with_inputs("message"), +# dspy.Example(message=">5% speed increase on empty loops", json='{"label":"YES","reason":"Explicit runtime speedup","confidence":95,"flags":["mentions-speed"]}').with_inputs("message"), +# dspy.Example(message="fix speed regression by inlining", json='{"label":"YES","reason":"Fixes speed regression","confidence":95,"flags":["regression","mentions-speed","inlining"]}').with_inputs("message"), +# dspy.Example(message="non-blocking requests (speed-up factor ~0.02s/it)", json='{"label":"YES","reason":"Throughput via non-blocking","confidence":85,"flags":["non-blocking","mentions-speed"]}').with_inputs("message"), +# dspy.Example(message="tests: fix asv", json='{"label":"NO","reason":"ASV/tests only","confidence":85,"flags":["tests-only","infra"]}').with_inputs("message"), +# dspy.Example(message="tests:perf:capsys upgrades", json='{"label":"NO","reason":"Perf tests 
infra","confidence":85,"flags":["tests-only","infra"]}').with_inputs("message"), +# dspy.Example(message="revert to N_BAR=10 as default, ... looks better without measurable reduction in iteration speed", json='{"label":"NO","reason":"UI freq, not faster runtime","confidence":80,"flags":["ambiguous"]}').with_inputs("message"), +# ] +# def recall_weighted_metric(golds, preds): +# # penalize FN 3x more than FP +# tp=fp=tn=fn=0 +# for g,p in zip(golds,preds): +# g_y = json.loads(g.json)["label"] == "YES" +# p_y = json.loads(p.json)["label"] == "YES" +# if g_y and p_y: tp+=1 +# elif (not g_y) and p_y: fp+=1 +# elif (not g_y) and (not p_y): tn+=1 +# else: fn+=1 +# # higher is better +# return (tp - 3*fn) - 0.5*fp +# tele = BootstrapFewShot(metric=recall_weighted_metric, max_bootstrapped_demos=6, max_labeled_demos=6) +# optimized = tele.compile(PerfClassifier(), trainset=train) # returns an optimized program +# import IPython; IPython.embed(header="perf_judge.py: debugging") diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index 617ad0f..03a1c65 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -28,15 +28,36 @@ def register(self, key: str, context: DockerContext) -> None: logger.debug(f"Registered Docker context: {key}") def get(self, key: str) -> DockerContext: - """Retrieve a Docker context by key.""" - if key in self.registry: - return self.registry[key] - # for asv-owner-repo-sha, check if asv-owner-repo exists. - logger.debug(f"Context '{key}' not found in registry. Searching for a matching context.") - owner_repo_key = key.rsplit("-", 1)[0] - if owner_repo_key in self.registry: - logger.debug(f"Found context '{owner_repo_key}' for key '{key}'.") - return self.registry[owner_repo_key] + """ + Retrieve a Docker context by key using hierarchical matching. 
+ "asv-astropy-astropy-14134" should query these queries in-order: + "asv-astropy-astropy-14134" + "asv-astropy-astropy" + """ + # Build candidate keys in the required order, deduplicated while preserving order. + candidates = [key] + + if "-" in key: + # e.g., "asv-owner-repo-sha" -> "asv-owner-repo" + owner_repo_key = key.rsplit("-", 1)[0] + candidates.append(owner_repo_key) + + # Preserve order but remove duplicates + seen = set() + ordered_candidates = [] + for c in candidates: + if c not in seen: + ordered_candidates.append(c) + seen.add(c) + + # Try each candidate in order + for candidate in ordered_candidates: + if candidate in self.registry: + if candidate == key: + logger.debug(f"Found exact context for key '{key}'.") + else: + logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") + return self.registry[candidate] logger.info(f"No context found for key '{key}'. Using default context.") return self.registry["default"] diff --git a/src/datasmith/execution/collect_commits_offline.py b/src/datasmith/execution/collect_commits_offline.py index 8593a9c..b04b987 100644 --- a/src/datasmith/execution/collect_commits_offline.py +++ b/src/datasmith/execution/collect_commits_offline.py @@ -1,15 +1,20 @@ from __future__ import annotations +import contextlib import os import re +import sys import tempfile import urllib.parse +from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait from pathlib import Path from git import GitCommandError, Repo +from tqdm.auto import tqdm from datasmith import logger -from datasmith.utils import CACHE_LOCATION, _get_github_metadata, cache_completion +from datasmith.agents.perf_judge import PerfClassifier +from datasmith.utils import CACHE_LOCATION, cache_completion _PR_MERGE_PATTERNS: tuple[re.Pattern[str], ...] = ( # standard "Merge pull request #123 ..." 
@@ -39,15 +44,92 @@ def _is_pr_merge(message: str) -> bool: return any(p.search(message) for p in _PR_MERGE_PATTERNS) -def _is_public(repo_name: str) -> bool: +def find_parent_commits(repo_name: str, commits: list[str]) -> list[str]: """ - Check if a repo is public. + Return a list of commit SHAs that are parent commits of the given commits, + **without** calling any GitHub API endpoints. """ - return _get_github_metadata(f"/repos/{repo_name}") is not None + with tempfile.TemporaryDirectory(prefix="gh-history-") as workdir: + workdir_path = Path(workdir) + url = f"https://github.com/{repo_name}.git" + # Clone *just* the commit / tree metadata (no blobs). + clone_kwargs: dict = { + "multi_options": ["--filter=tree:0"], + "no_checkout": True, + } -@cache_completion(CACHE_LOCATION, "search_commits_offline") -def search_commits( + # ignore if repo is not public + try: + repo = Repo.clone_from( + url, + workdir_path, + env={"GIT_TERMINAL_PROMPT": "0", **os.environ}, + **clone_kwargs, + ) + except GitCommandError as e: + if e.status == 128: + msg = e.stderr.strip() or "authentication failed or repository not found" + logger.warning("Cannot clone %s: %s", url, msg) + return [] + raise + + parent_commits = set() + for commit_sha in commits: + try: + commit = repo.commit(commit_sha) + # Add parent commits if they exist + for parent in commit.parents: + parent_commits.add(parent.hexsha) + except Exception as e: + logger.warning(f"Could not find commit {commit_sha} in {repo_name}: {e}") + + logger.info(f"Collected {len(parent_commits)} parent commits from {repo_name}.") + return sorted(parent_commits) + + +def find_tagged_releases(repo_name: str) -> list[str]: + """ + Return a list of commit SHAs that are tagged releases, **without** + calling any GitHub API endpoints. + """ + with tempfile.TemporaryDirectory(prefix="gh-history-") as workdir: + workdir_path = Path(workdir) + url = f"https://github.com/{repo_name}.git" + + # Clone *just* the commit / tree metadata (no blobs). 
+ clone_kwargs: dict = { + "multi_options": ["--filter=tree:0"], + "no_checkout": True, + } + + # ignore if repo is not public + try: + repo = Repo.clone_from( + url, + workdir_path, + env={"GIT_TERMINAL_PROMPT": "0", **os.environ}, + **clone_kwargs, + ) + except GitCommandError as e: + if e.status == 128: + msg = e.stderr.strip() or "authentication failed or repository not found" + logger.warning("Cannot clone %s: %s", url, msg) + return [] + raise + + merge_shas: set[str] = set() + for tag in repo.tags: + if tag.commit.hexsha not in merge_shas: + merge_shas.add(tag.commit.hexsha) + + logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") + + return sorted(merge_shas) + + +@cache_completion(CACHE_LOCATION, "find_perf_commits") +def find_perf_commits( # noqa: C901 repo_name: str, query: str, max_pages: int = 100, # ignored (kept for compatibility) @@ -62,9 +144,14 @@ def search_commits( • selects commits whose message looks like a PR merge The only element of *query* we still honour is `base=`. + Uses an AI Agent to find performance-related commits. 
""" qs = urllib.parse.parse_qs(query, keep_blank_values=True) base_branch: str | None = qs.get("base", [None])[0] + n_workers = qs.get("n_workers", [1])[0] + n_workers = int(n_workers) if isinstance(n_workers, str) else 1 + + perf_classifier = PerfClassifier() with tempfile.TemporaryDirectory(prefix="gh-history-") as workdir: workdir_path = Path(workdir) @@ -97,9 +184,69 @@ def search_commits( branch = base_branch or _default_branch(repo) ref_to_walk = f"origin/{branch}" + commits = [(c.hexsha, c.message) for c in repo.iter_commits(ref_to_walk)] + + def process_commit_tuple(t: tuple[str, str | bytes]) -> str | None: + hexsha, message = t + full_msg = message.strip() + + if not _is_pr_merge(str(full_msg)): + logger.debug(f"Skipping commit {hexsha} as it is not a PR merge.") + return None + + full_msg = re.sub(r"\nSigned-off-by:.*", "", str(full_msg)).replace("\n\n", "\n").strip() + if len(full_msg.split()) > 2048: + full_msg = " ".join(full_msg.split()[:2048]) + "..." + + is_perf, agent_trace = perf_classifier.get_response(message=str(full_msg)) + if not is_perf: + logger.debug(f"Skipping commit {hexsha} as it is not a performance commit.") + logger.debug(f"Agent trace: {agent_trace}") + return None + + return hexsha + merge_shas: set[str] = set() - for commit in repo.iter_commits(ref_to_walk): - if _is_pr_merge(str(commit.message)): - merge_shas.add(commit.hexsha) + max_workers = n_workers + # keep a small multiple of workers in-flight; adjust if you want more buffering + window = max_workers * 4 + + with ThreadPoolExecutor(max_workers=max_workers) as ex: + pbar = tqdm( + total=len(commits), + desc=f"Walking {repo_name} commits", + unit="commit", + file=sys.stdout, + miniters=1, + mininterval=0.1, + ) + + it = iter(commits) + pending = set() + + # prime the window + for _ in range(min(window, len(commits))): + pending.add(ex.submit(process_commit_tuple, next(it))) + + while pending: + done, pending = wait(pending, return_when=FIRST_COMPLETED) + + for fut in done: + 
try: + sha = fut.result() + if sha: + merge_shas.add(sha) + except Exception: + # don't let one bad task kill the progress loop + logger.exception("Worker failed") + finally: + pbar.update(1) + + # backfill one task for each completed, keeping the window steady + with contextlib.suppress(StopIteration): + pending.add(ex.submit(process_commit_tuple, next(it))) + + pbar.close() + logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") return sorted(merge_shas) diff --git a/src/datasmith/execution/utils.py b/src/datasmith/execution/utils.py index acb0724..16530f7 100644 --- a/src/datasmith/execution/utils.py +++ b/src/datasmith/execution/utils.py @@ -1,3 +1,4 @@ +import re from typing import Any from git import BadName, GitCommandError, Repo @@ -9,6 +10,103 @@ logger = get_logger("execution.utils") +_FLAG_MAP = { + "i": re.IGNORECASE, + "m": re.MULTILINE, + "s": re.DOTALL, + "x": re.VERBOSE, +} + + +def _parse_flag_string(flag_str: str) -> int: + flags = 0 + for ch in flag_str: + flags |= _FLAG_MAP.get(ch, 0) + return flags + + +def _compile_patterns(raws: list[str], base_flags: int) -> list[re.Pattern[str]]: + """ + Accepts: + - raw regex strings (inline flags like (?i) allowed) + - or delimited tokens `/pattern/imsx` (flags optional) + Compiles with base_flags OR any trailing delimited flags. + Invalid patterns are skipped with a warning. 
+ """ + compiled: list[re.Pattern[str]] = [] + for raw in raws: + pat = raw + flags = base_flags + + # /pattern/flags style (e.g., /\bperf\b/i) + m = re.fullmatch(r"/(.*?)/([imsx]*)", raw) + if m: + pat = m.group(1) + flags |= _parse_flag_string(m.group(2)) + + try: + compiled.append(re.compile(pat, flags)) + except re.error as e: + logger.warning("Ignoring invalid regex %r: %s", raw, e) + return compiled + + +def _any_match(patterns: list[re.Pattern[str]] | str, text: str) -> bool: + if isinstance(patterns, str): + return False # should not happen, but just in case + return any(p.search(text) for p in patterns) + + +def _get_grep_params(qs: dict[str, list[str]]) -> dict[str, list[re.Pattern[str]] | str]: + base_flags = _parse_flag_string(qs.get("grep_flags", [""])[0]) + pos_any = _compile_patterns(qs.get("grep", []), base_flags) + pos_title = _compile_patterns(qs.get("grep_title", []), base_flags) + pos_msg = _compile_patterns(qs.get("grep_msg", []), base_flags) + + neg_any = _compile_patterns(qs.get("grep_not", []), base_flags) + neg_title = _compile_patterns(qs.get("grep_title_not", []), base_flags) + neg_msg = _compile_patterns(qs.get("grep_msg_not", []), base_flags) + + grep_mode = (qs.get("grep_mode", ["any"])[0] or "any").lower() + if grep_mode not in {"any", "all"}: + grep_mode = "any" + + return { + "pos_any": pos_any, + "pos_title": pos_title, + "pos_msg": pos_msg, + "neg_any": neg_any, + "neg_title": neg_title, + "neg_msg": neg_msg, + "grep_mode": grep_mode, + } + + +def _neg_matches(grep_params: dict[str, list[re.Pattern[str]] | str], title: str, message: str) -> bool: + return ( + _any_match(grep_params["neg_any"], title) + or _any_match(grep_params["neg_any"], message) + or _any_match(grep_params["neg_title"], title) + or _any_match(grep_params["neg_msg"], message) + ) + + +def _pos_matches(grep_params: dict[str, list[re.Pattern[str]] | str], title: str, message: str) -> bool: + if grep_params["pos_any"] or grep_params["pos_title"] or 
grep_params["pos_msg"]: + checks = [] + if grep_params["pos_any"]: + checks.append(_any_match(grep_params["pos_any"], title) or _any_match(grep_params["pos_any"], message)) + if grep_params["pos_title"]: + checks.append(_any_match(grep_params["pos_title"], title)) + if grep_params["pos_msg"]: + checks.append(_any_match(grep_params["pos_msg"], message)) + + ok = any(checks) if grep_params["grep_mode"] == "any" else all(checks) + if not ok: + return True + return True + + def _get_commit_info(repo_name: str, commit_sha: str) -> dict: try: commit_info = _get_github_metadata(endpoint=f"/repos/{repo_name}/commits/{commit_sha}") @@ -51,7 +149,7 @@ def _get_commit_info(repo_name: str, commit_sha: str) -> dict: @cache_completion(CACHE_LOCATION, "get_commit_info_offline") -def _get_commit_info_offline(repo: Repo, commit_sha: str) -> dict[str, Any]: +def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> dict[str, Any]: """ Return commit metadata and diff stats *without* the GitHub REST API. 
@@ -62,7 +160,6 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str) -> dict[str, Any]: """ try: commit = repo.commit(commit_sha) - except (BadName, ValueError): logger.exception("Maybe commit not found: %s", commit_sha) repo.git.fetch("--no-filter", "--quiet", "origin", commit_sha) @@ -77,10 +174,18 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str) -> dict[str, Any]: "total_deletions": 0, "total_files_changed": 0, "files_changed": "", + "patch": "", } stats = commit.stats + # get text based patch patch + patch = ( + repo.git.format_patch("--stdout", "-1", commit.hexsha) + .encode("utf-8", "surrogateescape") + .decode("utf-8", "backslashreplace") + ) + return { "sha": commit.hexsha, "date": commit.committed_datetime.isoformat(), @@ -89,6 +194,7 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str) -> dict[str, Any]: "total_deletions": stats.total["deletions"], "total_files_changed": stats.total["files"], "files_changed": "\n".join(str(k) for k in stats.files), + "patch": patch, } @@ -98,23 +204,23 @@ def find_file_in_tree(repo: str, filename: str, branch: str | None = None) -> li # sometimes the API returns a single-element list if isinstance(repo_info, list): if len(repo_info) == 1: - repo_info = repo_info[0] + repo_info = repo_info[0] # pyright: ignore[reportArgumentType] else: raise ValueError(f"Expected one repo info object, got {len(repo_info)}") # noqa: TRY003 - branch = repo_info.get("default_branch") + branch = repo_info.get("default_branch") # pyright: ignore[reportOptionalMemberAccess] if not branch: raise ValueError("Could not determine the default branch for this repository") # noqa: TRY003 r = _get_github_metadata(endpoint=f"/repos/{repo}/git/refs/heads/{branch}") if isinstance(r, list): if len(r) == 1: - r = r[0] + r = r[0] # pyright: ignore[reportArgumentType] else: raise ValueError() - sha = r["object"]["sha"] + sha = r["object"]["sha"] # pyright: ignore[reportOptionalSubscript] r = 
_get_github_metadata(endpoint=f"/repos/{repo}/git/trees/{sha}?recursive=1") - tree = r["tree"] + tree = r["tree"] # pyright: ignore[reportOptionalSubscript] # 4) Return any blobs whose path ends with the filename matches = [entry["path"] for entry in tree if entry["type"] == "blob" and entry["path"].endswith(filename)] diff --git a/uv.lock b/uv.lock index 79cefbb..3be8a94 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9, <4.0" resolution-markers = [ "python_full_version >= '3.12'", @@ -8,6 +8,170 @@ resolution-markers = [ "python_full_version < '3.10'", ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = 
"2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/dc/ef9394bde9080128ad401ac7ede185267ed637df03b51f05d14d1c99ad67/aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc", size = 703921, upload-time = "2025-07-29T05:49:43.584Z" }, + { url = "https://files.pythonhosted.org/packages/8f/42/63fccfc3a7ed97eb6e1a71722396f409c46b60a0552d8a56d7aad74e0df5/aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af", size = 480288, upload-time = "2025-07-29T05:49:47.851Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a2/7b8a020549f66ea2a68129db6960a762d2393248f1994499f8ba9728bbed/aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421", size = 468063, upload-time = "2025-07-29T05:49:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/8f/f5/d11e088da9176e2ad8220338ae0000ed5429a15f3c9dfd983f39105399cd/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79", size = 1650122, upload-time = "2025-07-29T05:49:51.874Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6b/b60ce2757e2faed3d70ed45dafee48cee7bfb878785a9423f7e883f0639c/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77", size = 1624176, upload-time = "2025-07-29T05:49:53.805Z" }, + { url = "https://files.pythonhosted.org/packages/dd/de/8c9fde2072a1b72c4fadecf4f7d4be7a85b1d9a4ab333d8245694057b4c6/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c", size = 1696583, upload-time = 
"2025-07-29T05:49:55.338Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ad/07f863ca3d895a1ad958a54006c6dafb4f9310f8c2fdb5f961b8529029d3/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4", size = 1738896, upload-time = "2025-07-29T05:49:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/20/43/2bd482ebe2b126533e8755a49b128ec4e58f1a3af56879a3abdb7b42c54f/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6", size = 1643561, upload-time = "2025-07-29T05:49:58.762Z" }, + { url = "https://files.pythonhosted.org/packages/23/40/2fa9f514c4cf4cbae8d7911927f81a1901838baf5e09a8b2c299de1acfe5/aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2", size = 1583685, upload-time = "2025-07-29T05:50:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c3/94dc7357bc421f4fb978ca72a201a6c604ee90148f1181790c129396ceeb/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d", size = 1627533, upload-time = "2025-07-29T05:50:02.306Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3f/1f8911fe1844a07001e26593b5c255a685318943864b27b4e0267e840f95/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb", size = 1638319, upload-time = "2025-07-29T05:50:04.282Z" }, + { url = "https://files.pythonhosted.org/packages/4e/46/27bf57a99168c4e145ffee6b63d0458b9c66e58bb70687c23ad3d2f0bd17/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5", size = 1613776, upload-time = 
"2025-07-29T05:50:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7e/1d2d9061a574584bb4ad3dbdba0da90a27fdc795bc227def3a46186a8bc1/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b", size = 1693359, upload-time = "2025-07-29T05:50:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/bee429b52233c4a391980a5b3b196b060872a13eadd41c3a34be9b1469ed/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065", size = 1716598, upload-time = "2025-07-29T05:50:09.33Z" }, + { url = "https://files.pythonhosted.org/packages/57/39/b0314c1ea774df3392751b686104a3938c63ece2b7ce0ba1ed7c0b4a934f/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1", size = 1644940, upload-time = "2025-07-29T05:50:11.334Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/3dacb8d3f8f512c8ca43e3fa8a68b20583bd25636ffa4e56ee841ffd79ae/aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a", size = 429239, upload-time = "2025-07-29T05:50:12.803Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f9/470b5daba04d558c9673ca2034f28d067f3202a40e17804425f0c331c89f/aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830", size = 452297, upload-time = "2025-07-29T05:50:14.266Z" }, + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = 
"2025-07-29T05:50:59.192Z" }, + { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = 
"2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/18/8d/da08099af8db234d1cd43163e6ffc8e9313d0e988cee1901610f2fa5c764/aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98", size = 706829, upload-time = "2025-07-29T05:51:54.434Z" }, + { url = "https://files.pythonhosted.org/packages/4e/94/8eed385cfb60cf4fdb5b8a165f6148f3bebeb365f08663d83c35a5f273ef/aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406", size = 481806, upload-time = "2025-07-29T05:51:56.355Z" }, + { url = "https://files.pythonhosted.org/packages/38/68/b13e1a34584fbf263151b3a72a084e89f2102afe38df1dce5a05a15b83e9/aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d", size = 469205, upload-time = "2025-07-29T05:51:58.277Z" }, + { url = "https://files.pythonhosted.org/packages/38/14/3d7348bf53aa4af54416bc64cbef3a2ac5e8b9bfa97cc45f1cf9a94d9c8d/aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf", size = 1644174, upload-time = "2025-07-29T05:52:00.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/ed/fd9b5b22b0f6ca1a85c33bb4868cbcc6ae5eae070a0f4c9c5cad003c89d7/aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6", size = 1618672, upload-time = "2025-07-29T05:52:02.272Z" }, + { url = "https://files.pythonhosted.org/packages/39/f7/f6530ab5f8c8c409e44a63fcad35e839c87aabecdfe5b8e96d671ed12f64/aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142", size = 1692295, upload-time = "2025-07-29T05:52:04.546Z" }, + { url = "https://files.pythonhosted.org/packages/cb/dc/3cf483bb0106566dc97ebaa2bb097f5e44d4bc4ab650a6f107151cd7b193/aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89", size = 1731609, upload-time = "2025-07-29T05:52:06.552Z" }, + { url = "https://files.pythonhosted.org/packages/de/a4/fd04bf807851197077d9cac9381d58f86d91c95c06cbaf9d3a776ac4467a/aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263", size = 1637852, upload-time = "2025-07-29T05:52:08.975Z" }, + { url = "https://files.pythonhosted.org/packages/98/03/29d626ca3bcdcafbd74b45d77ca42645a5c94d396f2ee3446880ad2405fb/aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530", size = 1572852, upload-time = "2025-07-29T05:52:11.508Z" }, + { url = "https://files.pythonhosted.org/packages/5f/cd/b4777a9e204f4e01091091027e5d1e2fa86decd0fee5067bc168e4fa1e76/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75", size = 1620813, upload-time 
= "2025-07-29T05:52:13.891Z" }, + { url = "https://files.pythonhosted.org/packages/ae/26/1a44a6e8417e84057beaf8c462529b9e05d4b53b8605784f1eb571f0ff68/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05", size = 1630951, upload-time = "2025-07-29T05:52:15.955Z" }, + { url = "https://files.pythonhosted.org/packages/dd/7f/10c605dbd01c40e2b27df7ef9004bec75d156f0705141e11047ecdfe264d/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54", size = 1607595, upload-time = "2025-07-29T05:52:18.089Z" }, + { url = "https://files.pythonhosted.org/packages/66/f6/2560dcb01731c1d7df1d34b64de95bc4b3ed02bb78830fd82299c1eb314e/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02", size = 1695194, upload-time = "2025-07-29T05:52:20.255Z" }, + { url = "https://files.pythonhosted.org/packages/e7/02/ee105ae82dc2b981039fd25b0cf6eaa52b493731960f9bc861375a72b463/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0", size = 1710872, upload-time = "2025-07-29T05:52:22.769Z" }, + { url = "https://files.pythonhosted.org/packages/88/16/70c4e42ed6a04f78fb58d1a46500a6ce560741d13afde2a5f33840746a5f/aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09", size = 1640539, upload-time = "2025-07-29T05:52:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1d/a7eb5fa8a6967117c5c0ad5ab4b1dec0d21e178c89aa08bc442a0b836392/aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d", size = 430164, upload-time = "2025-07-29T05:52:27.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/25/e0cf8793aedc41c6d7f2aad646a27e27bdacafe3b402bb373d7651c94d73/aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8", size = 453370, upload-time = "2025-07-29T05:52:29.936Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "alembic" +version = "1.16.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/52/72e791b75c6b1efa803e491f7cbab78e963695e76d4ada05385252927e76/alembic-1.16.4.tar.gz", hash = "sha256:efab6ada0dd0fae2c92060800e0bf5c1dc26af15a10e02fb4babff164b4725e2", size = 1968161, upload-time = "2025-07-10T16:17:20.192Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/62/96b5217b742805236614f05904541000f55422a6060a90d7fd4ce26c172d/alembic-1.16.4-py3-none-any.whl", hash = "sha256:b05e51e8e82efc1abd14ba2af6392897e145930c3e0a2faf2b0da2f7f7fd660d", size = 247026, upload-time = "2025-07-10T16:17:21.845Z" }, +] + 
+[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, +] + [[package]] name = "appnope" version = "0.1.4" @@ -93,6 +257,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/9a/6872af94fc8e8072723946651e65f66e16a0ca0efec7806bce8c2e2483d1/asv_runner-0.2.1-py3-none-any.whl", hash = "sha256:655d466208ce311768071f5003a61611481b24b3ad5ac41fb8a6374197e647e9", size = 47660, upload-time = "2024-02-11T21:50:07.026Z" }, ] 
+[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncer" +version = "0.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/67/7ea59c3e69eaeee42e7fc91a5be67ca5849c8979acac2b920249760c6af2/asyncer-0.0.8.tar.gz", hash = "sha256:a589d980f57e20efb07ed91d0dbe67f1d2fd343e7142c66d3a099f05c620739c", size = 18217, upload-time = "2024-08-24T23:15:36.449Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/04/15b6ca6b7842eda2748bda0a0af73f2d054e9344320f8bba01f994294bcb/asyncer-0.0.8-py3-none-any.whl", hash = "sha256:5920d48fc99c8f8f0f1576e1882f5022885589c5fcbc46ce4224ec3e53776eeb", size = 9209, upload-time = "2024-08-24T23:15:35.317Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "backoff" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001, upload-time = "2022-10-05T19:19:32.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, +] + [[package]] name = "beautifulsoup4" version = "4.13.4" @@ -334,6 +538,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, ] +[[package]] +name = "cloudpickle" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/39/069100b84d7418bc358d81669d5748efb14b9cceacd2f9c75f550424132f/cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64", size = 22113, upload-time = "2025-01-14T17:02:05.085Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/e8/64c37fadfc2816a7701fa8a6ed8d87327c7d54eacfbfb6edab14a2f2be75/cloudpickle-3.1.1-py3-none-any.whl", hash = 
"sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e", size = 20992, upload-time = "2025-01-14T17:02:02.417Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -343,6 +556,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "colorlog" +version = "6.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624, upload-time = "2024-10-29T18:34:51.011Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424, upload-time = "2024-10-29T18:34:49.815Z" }, +] + [[package]] name = "comm" version = "0.2.3" @@ -695,6 +920,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, ] +[[package]] +name = "datasets" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill", marker = "python_full_version < '3.10'" }, + { name = "filelock", marker = "python_full_version < '3.10'" }, + { name = "fsspec", version = "2025.3.0", source = { registry = "https://pypi.org/simple" 
}, extra = ["http"], marker = "python_full_version < '3.10'" }, + { name = "huggingface-hub", marker = "python_full_version < '3.10'" }, + { name = "multiprocess", marker = "python_full_version < '3.10'" }, + { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pandas", marker = "python_full_version < '3.10'" }, + { name = "pyarrow", marker = "python_full_version < '3.10'" }, + { name = "pyyaml", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "tqdm", marker = "python_full_version < '3.10'" }, + { name = "xxhash", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/9d/348ed92110ba5f9b70b51ca1078d4809767a835aa2b7ce7e74ad2b98323d/datasets-4.0.0.tar.gz", hash = "sha256:9657e7140a9050db13443ba21cb5de185af8af944479b00e7ff1e00a61c8dbf1", size = 569566, upload-time = "2025-07-09T14:35:52.431Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/62/eb8157afb21bd229c864521c1ab4fa8e9b4f1b06bafdd8c4668a7a31b5dd/datasets-4.0.0-py3-none-any.whl", hash = "sha256:7ef95e62025fd122882dbce6cb904c8cd3fbc829de6669a5eb939c77d50e203d", size = 494825, upload-time = "2025-07-09T14:35:50.658Z" }, +] + [[package]] name = "datasmith" version = "0.0.1" @@ -702,6 +951,8 @@ source = { editable = "." 
} dependencies = [ { name = "asv" }, { name = "docker" }, + { name = "dspy", version = "2.6.27", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "dspy", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "gitpython" }, { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, @@ -744,6 +995,7 @@ dev = [ requires-dist = [ { name = "asv" }, { name = "docker" }, + { name = "dspy", specifier = ">=2.6.27" }, { name = "gitpython" }, { name = "numpy" }, { name = "pandas" }, @@ -845,6 +1097,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/72/ac643d909da2e50b1fb78143591079f21649f60572d8224be4ba4d795c2c/deptry-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9f9bbb92f95ada9ccfa5ecefee05ba3c39cfa0734b5483a3a1a3c4eeb9c99054", size = 1631828, upload-time = "2025-07-31T05:54:53.486Z" }, ] +[[package]] +name = "dill" +version = "0.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847, upload-time = "2024-01-27T23:42:16.145Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252, upload-time = "2024-01-27T23:42:14.239Z" }, +] + +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, +] + [[package]] name = "distlib" version = "0.4.0" @@ -854,6 +1124,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + [[package]] name = "docker" version = "7.1.0" @@ -868,6 +1147,81 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = 
"2024-05-23T11:13:55.01Z" }, ] +[[package]] +name = "dspy" +version = "2.6.27" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "anyio", marker = "python_full_version < '3.10'" }, + { name = "asyncer", marker = "python_full_version < '3.10'" }, + { name = "backoff", marker = "python_full_version < '3.10'" }, + { name = "cachetools", marker = "python_full_version < '3.10'" }, + { name = "cloudpickle", marker = "python_full_version < '3.10'" }, + { name = "datasets", marker = "python_full_version < '3.10'" }, + { name = "diskcache", marker = "python_full_version < '3.10'" }, + { name = "joblib", marker = "python_full_version < '3.10'" }, + { name = "json-repair", version = "0.44.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "litellm", marker = "python_full_version < '3.10'" }, + { name = "magicattr", marker = "python_full_version < '3.10'" }, + { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "openai", marker = "python_full_version < '3.10'" }, + { name = "optuna", marker = "python_full_version < '3.10'" }, + { name = "pandas", marker = "python_full_version < '3.10'" }, + { name = "pydantic", marker = "python_full_version < '3.10'" }, + { name = "regex", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "rich", marker = "python_full_version < '3.10'" }, + { name = "tenacity", marker = "python_full_version < '3.10'" }, + { name = "tqdm", marker = "python_full_version < '3.10'" }, + { name = "ujson", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/8a/f7ff1a6d3b5294678f13d17ecfc596f49a59e494b190e4e30f7dea7df1dc/dspy-2.6.27.tar.gz", hash = 
"sha256:de1c4f6f6d127e0efed894e1915dac40f5d5623e7f1cf3d749c98d790066477a", size = 234604, upload-time = "2025-06-03T17:47:13.411Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/bb/8a75d44bc1b54dea0fa0428eb52b13e7ee533b85841d2c53a53dfc360646/dspy-2.6.27-py3-none-any.whl", hash = "sha256:54e55fd6999b6a46e09b0e49e8c4b71be7dd56a881e66f7a60b8d657650c1a74", size = 297296, upload-time = "2025-06-03T17:47:11.526Z" }, +] + +[[package]] +name = "dspy" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "anyio", marker = "python_full_version >= '3.10'" }, + { name = "asyncer", marker = "python_full_version >= '3.10'" }, + { name = "backoff", marker = "python_full_version >= '3.10'" }, + { name = "cachetools", marker = "python_full_version >= '3.10'" }, + { name = "cloudpickle", marker = "python_full_version >= '3.10'" }, + { name = "diskcache", marker = "python_full_version >= '3.10'" }, + { name = "gepa", marker = "python_full_version >= '3.10'" }, + { name = "joblib", marker = "python_full_version >= '3.10'" }, + { name = "json-repair", version = "0.49.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "litellm", marker = "python_full_version >= '3.10'" }, + { name = "magicattr", marker = "python_full_version >= '3.10'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "openai", marker = "python_full_version >= '3.10'" }, + { name = "optuna", marker = "python_full_version >= '3.10'" }, + { name = "pydantic", marker = "python_full_version >= '3.10'" }, + { name = "regex", marker = 
"python_full_version >= '3.10'" }, + { name = "requests", marker = "python_full_version >= '3.10'" }, + { name = "rich", marker = "python_full_version >= '3.10'" }, + { name = "tenacity", marker = "python_full_version >= '3.10'" }, + { name = "tqdm", marker = "python_full_version >= '3.10'" }, + { name = "ujson", marker = "python_full_version >= '3.10'" }, + { name = "xxhash", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/cb/4bfb5345e230e33b0fa4f18c16fe646395a081a48c6feb314e6993a86bb1/dspy-3.0.1.tar.gz", hash = "sha256:92220584eb7c3587746cac76209f7f167dbf6f38f5f05a7019d610ededc1eede", size = 213285, upload-time = "2025-08-14T17:39:32.415Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b4/ef2706be57daf78562b8aa811cdfe184616becb6659522ace85919202b21/dspy-3.0.1-py3-none-any.whl", hash = "sha256:a9afb6eedaab063e9ca6d46840fad85b97ab45e79b4bf9371e6bf3a5666ef5c6", size = 259011, upload-time = "2025-08-14T17:39:30.901Z" }, +] + [[package]] name = "exceptiongroup" version = "1.3.0" @@ -963,6 +1317,157 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0f/64/9d606e66d498917cd7a2ff24f558010d42d6fd4576d9dd57f0bd98333f5a/fonttools-4.59.1-py3-none-any.whl", hash = "sha256:647db657073672a8330608970a984d51573557f328030566521bc03415535042", size = 1130094, upload-time = "2025-08-14T16:28:12.048Z" }, ] +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", size = 81304, upload-time = "2025-06-09T22:59:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61", size = 47735, upload-time = "2025-06-09T22:59:48.133Z" }, + { url = "https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d", size = 46775, upload-time = "2025-06-09T22:59:49.564Z" }, + { url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e", size = 224644, upload-time = "2025-06-09T22:59:51.35Z" }, + { url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9", size = 222125, upload-time = "2025-06-09T22:59:52.884Z" }, + { url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c", size = 233455, upload-time = "2025-06-09T22:59:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981", size = 227339, upload-time = "2025-06-09T22:59:56.187Z" }, + { url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615", size = 212969, upload-time = "2025-06-09T22:59:57.604Z" }, + { url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50", size = 222862, upload-time = "2025-06-09T22:59:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa", size = 222492, upload-time = "2025-06-09T23:00:01.026Z" }, + { url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577", size = 238250, upload-time = "2025-06-09T23:00:03.401Z" }, + { url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59", size = 218720, upload-time = "2025-06-09T23:00:05.282Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", 
hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e", size = 232585, upload-time = "2025-06-09T23:00:07.962Z" }, + { url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd", size = 234248, upload-time = "2025-06-09T23:00:09.428Z" }, + { url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718", size = 221621, upload-time = "2025-06-09T23:00:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e", size = 39578, upload-time = "2025-06-09T23:00:13.526Z" }, + { url = "https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464", size = 43830, upload-time = "2025-06-09T23:00:14.98Z" }, + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + 
{ url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 
224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" 
}, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = 
"2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = 
"2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 
290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/b1/ee59496f51cd244039330015d60f13ce5a54a0f2bd8d79e4a4a375ab7469/frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630", size = 82434, upload-time = "2025-06-09T23:02:05.195Z" }, + { url = "https://files.pythonhosted.org/packages/75/e1/d518391ce36a6279b3fa5bc14327dde80bcb646bb50d059c6ca0756b8d05/frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71", size = 48232, upload-time = "2025-06-09T23:02:07.728Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8d/a0d04f28b6e821a9685c22e67b5fb798a5a7b68752f104bfbc2dccf080c4/frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44", size = 47186, upload-time = "2025-06-09T23:02:09.243Z" }, + { url = "https://files.pythonhosted.org/packages/93/3a/a5334c0535c8b7c78eeabda1579179e44fe3d644e07118e59a2276dedaf1/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878", size = 226617, upload-time = "2025-06-09T23:02:10.949Z" }, + { url = "https://files.pythonhosted.org/packages/0a/67/8258d971f519dc3f278c55069a775096cda6610a267b53f6248152b72b2f/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb", size = 224179, upload-time = "2025-06-09T23:02:12.603Z" }, + { url = "https://files.pythonhosted.org/packages/fc/89/8225905bf889b97c6d935dd3aeb45668461e59d415cb019619383a8a7c3b/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6", size = 235783, upload-time = "2025-06-09T23:02:14.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/6e/ef52375aa93d4bc510d061df06205fa6dcfd94cd631dd22956b09128f0d4/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35", size = 229210, upload-time = "2025-06-09T23:02:16.313Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/62c87d1a6547bfbcd645df10432c129100c5bd0fd92a384de6e3378b07c1/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87", size = 215994, upload-time = "2025-06-09T23:02:17.9Z" }, + { url = "https://files.pythonhosted.org/packages/45/d2/263fea1f658b8ad648c7d94d18a87bca7e8c67bd6a1bbf5445b1bd5b158c/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677", size = 225122, upload-time = "2025-06-09T23:02:19.479Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/7145e35d12fb368d92124f679bea87309495e2e9ddf14c6533990cb69218/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938", size = 224019, upload-time = "2025-06-09T23:02:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/44/1e/7dae8c54301beb87bcafc6144b9a103bfd2c8f38078c7902984c9a0c4e5b/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2", size = 239925, upload-time = "2025-06-09T23:02:22.466Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1e/99c93e54aa382e949a98976a73b9b20c3aae6d9d893f31bbe4991f64e3a8/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319", size = 220881, upload-time = "2025-06-09T23:02:24.521Z" }, 
+ { url = "https://files.pythonhosted.org/packages/5e/9c/ca5105fa7fb5abdfa8837581be790447ae051da75d32f25c8f81082ffc45/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890", size = 234046, upload-time = "2025-06-09T23:02:26.206Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4d/e99014756093b4ddbb67fb8f0df11fe7a415760d69ace98e2ac6d5d43402/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd", size = 235756, upload-time = "2025-06-09T23:02:27.79Z" }, + { url = "https://files.pythonhosted.org/packages/8b/72/a19a40bcdaa28a51add2aaa3a1a294ec357f36f27bd836a012e070c5e8a5/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb", size = 222894, upload-time = "2025-06-09T23:02:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/08/49/0042469993e023a758af81db68c76907cd29e847d772334d4d201cbe9a42/frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e", size = 39848, upload-time = "2025-06-09T23:02:31.413Z" }, + { url = "https://files.pythonhosted.org/packages/5a/45/827d86ee475c877f5f766fbc23fb6acb6fada9e52f1c9720e2ba3eae32da/frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63", size = 44102, upload-time = "2025-06-09T23:02:32.808Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + +[[package]] +name = "fsspec" +version = "2025.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] 
+sdist = { url = "https://files.pythonhosted.org/packages/34/f4/5721faf47b8c499e776bc34c6a8fc17efdf7fdef0b00f398128bc5dcb4ac/fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972", size = 298491, upload-time = "2025-03-07T21:47:56.461Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/53/eb690efa8513166adef3e0669afd31e95ffde69fb3c52ec2ac7223ed6018/fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3", size = 193615, upload-time = "2025-03-07T21:47:54.809Z" }, +] + +[package.optional-dependencies] +http = [ + { name = "aiohttp", marker = "python_full_version < '3.10'" }, +] + +[[package]] +name = "fsspec" +version = "2025.7.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/02/0835e6ab9cfc03916fe3f78c0956cfcdb6ff2669ffa6651065d5ebf7fc98/fsspec-2025.7.0.tar.gz", hash = "sha256:786120687ffa54b8283d942929540d8bc5ccfa820deb555a2b5d0ed2b737bf58", size = 304432, upload-time = "2025-07-15T16:05:21.19Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, +] + +[[package]] +name = "gepa" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/0d/aa6065d7d59b3f10ff6818d527dada5a7179ac5643b666b6b6b71d11dab4/gepa-0.0.4.tar.gz", hash = "sha256:b3e020124c7d8a80c07595aca3b73647ec9151203d7166915ad62492b8459bd6", size = 32957, upload-time = "2025-08-14T05:08:36.792Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ce/c0/836c79f05113c96155e8de1bb8bf3631a9e7b3b75238c592d39460141ea8/gepa-0.0.4-py3-none-any.whl", hash = "sha256:53d275490d644855e90adf4eba1e3ace5c414c76ba0c0f22760b99a0e43984f9", size = 35191, upload-time = "2025-08-14T05:08:35.558Z" }, +] + [[package]] name = "gitdb" version = "4.0.12" @@ -988,6 +1493,139 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, ] +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/ed/6bfa4109fcb23a58819600392564fea69cdc6551ffd5e69ccf1d52a40cbc/greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c", size = 271061, upload-time = "2025-08-07T13:17:15.373Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fc/102ec1a2fc015b3a7652abab7acf3541d58c04d3d17a8d3d6a44adae1eb1/greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590", size = 629475, upload-time = "2025-08-07T13:42:54.009Z" }, + { url = "https://files.pythonhosted.org/packages/c5/26/80383131d55a4ac0fb08d71660fd77e7660b9db6bdb4e8884f46d9f2cc04/greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c", size = 
640802, upload-time = "2025-08-07T13:45:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/9f/7c/e7833dbcd8f376f3326bd728c845d31dcde4c84268d3921afcae77d90d08/greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b", size = 636703, upload-time = "2025-08-07T13:53:12.622Z" }, + { url = "https://files.pythonhosted.org/packages/e9/49/547b93b7c0428ede7b3f309bc965986874759f7d89e4e04aeddbc9699acb/greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31", size = 635417, upload-time = "2025-08-07T13:18:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" }, + { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" }, + { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, + { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, + { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, + { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, + { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c", size = 269859, upload-time = "2025-08-07T13:16:16.003Z" }, + { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d", size = 627610, upload-time = "2025-08-07T13:43:01.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/a7/9476decef51a0844195f99ed5dc611d212e9b3515512ecdf7321543a7225/greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58", size = 639417, upload-time = "2025-08-07T13:45:32.094Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/849b9159cbb176f8c0af5caaff1faffdece7a8417fcc6fe1869770e33e21/greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4", size = 634751, upload-time = "2025-08-07T13:53:18.848Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d3/844e714a9bbd39034144dca8b658dcd01839b72bb0ec7d8014e33e3705f0/greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433", size = 634020, upload-time = "2025-08-07T13:18:36.841Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df", size = 582817, upload-time = "2025-08-07T13:18:35.48Z" }, + { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594", size = 1111985, upload-time = "2025-08-07T13:42:42.425Z" }, + { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98", size = 1136137, upload-time = "2025-08-07T13:18:26.168Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b", size = 281400, upload-time = "2025-08-07T14:02:20.263Z" }, + { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb", size = 298533, upload-time = "2025-08-07T13:56:34.168Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.1.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/0a/a0f56735940fde6dd627602fec9ab3bad23f66a272397560abd65aba416e/hf_xet-1.1.7.tar.gz", hash = "sha256:20cec8db4561338824a3b5f8c19774055b04a8df7fff0cb1ff2cb1a0c1607b80", size = 477719, upload-time = "2025-08-06T00:30:55.741Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/7c/8d7803995caf14e7d19a392a486a040f923e2cfeff824e9b800b92072f76/hf_xet-1.1.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:60dae4b44d520819e54e216a2505685248ec0adbdb2dd4848b17aa85a0375cde", size = 2761743, upload-time = "2025-08-06T00:30:50.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/a3/fa5897099454aa287022a34a30e68dbff0e617760f774f8bd1db17f06bd4/hf_xet-1.1.7-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b109f4c11e01c057fc82004c9e51e6cdfe2cb230637644ade40c599739067b2e", size = 2624331, upload-time = "2025-08-06T00:30:49.212Z" }, + { url = "https://files.pythonhosted.org/packages/86/50/2446a132267e60b8a48b2e5835d6e24fd988000d0f5b9b15ebd6d64ef769/hf_xet-1.1.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efaaf1a5a9fc3a501d3e71e88a6bfebc69ee3a716d0e713a931c8b8d920038f", size = 3183844, upload-time = "2025-08-06T00:30:47.582Z" }, + { url = "https://files.pythonhosted.org/packages/20/8f/ccc670616bb9beee867c6bb7139f7eab2b1370fe426503c25f5cbb27b148/hf_xet-1.1.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:751571540f9c1fbad9afcf222a5fb96daf2384bf821317b8bfb0c59d86078513", size = 3074209, upload-time = "2025-08-06T00:30:45.509Z" }, + { url = "https://files.pythonhosted.org/packages/21/0a/4c30e1eb77205565b854f5e4a82cf1f056214e4dc87f2918ebf83d47ae14/hf_xet-1.1.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:18b61bbae92d56ae731b92087c44efcac216071182c603fc535f8e29ec4b09b8", size = 3239602, upload-time = "2025-08-06T00:30:52.41Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1e/fc7e9baf14152662ef0b35fa52a6e889f770a7ed14ac239de3c829ecb47e/hf_xet-1.1.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:713f2bff61b252f8523739969f247aa354ad8e6d869b8281e174e2ea1bb8d604", size = 3348184, upload-time = "2025-08-06T00:30:54.105Z" }, + { url = "https://files.pythonhosted.org/packages/a3/73/e354eae84ceff117ec3560141224724794828927fcc013c5b449bf0b8745/hf_xet-1.1.7-cp37-abi3-win_amd64.whl", hash = "sha256:2e356da7d284479ae0f1dea3cf5a2f74fdf925d6dca84ac4341930d892c7cb34", size = 2820008, upload-time = "2025-08-06T00:30:57.056Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "0.34.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec", version = "2025.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "fsspec", version = "2025.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or 
platform_machine == 'x86_64'" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, +] + [[package]] name = "identify" version = "2.6.13" @@ -1165,6 +1803,137 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, ] +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" 
} +sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/7e/4011b5c77bec97cb2b572f566220364e3e21b51c48c5bd9c4a9c26b41b67/jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303", size = 317215, upload-time = "2025-05-18T19:03:04.303Z" }, + { url = "https://files.pythonhosted.org/packages/8a/4f/144c1b57c39692efc7ea7d8e247acf28e47d0912800b34d0ad815f6b2824/jiter-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32bb468e3af278f095d3fa5b90314728a6916d89ba3d0ffb726dd9bf7367285e", size = 322814, upload-time = "2025-05-18T19:03:06.433Z" }, + { url = "https://files.pythonhosted.org/packages/63/1f/db977336d332a9406c0b1f0b82be6f71f72526a806cbb2281baf201d38e3/jiter-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8b3e0068c26ddedc7abc6fac37da2d0af16b921e288a5a613f4b86f050354f", size = 345237, upload-time = "2025-05-18T19:03:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1c/aa30a4a775e8a672ad7f21532bdbfb269f0706b39c6ff14e1f86bdd9e5ff/jiter-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:286299b74cc49e25cd42eea19b72aa82c515d2f2ee12d11392c56d8701f52224", size = 370999, upload-time = "2025-05-18T19:03:09.338Z" }, + { url = "https://files.pythonhosted.org/packages/35/df/f8257abc4207830cb18880781b5f5b716bad5b2a22fb4330cfd357407c5b/jiter-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ed5649ceeaeffc28d87fb012d25a4cd356dcd53eff5acff1f0466b831dda2a7", size = 491109, upload-time = "2025-05-18T19:03:11.13Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/76/9e1516fd7b4278aa13a2cc7f159e56befbea9aa65c71586305e7afa8b0b3/jiter-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ab0051160cb758a70716448908ef14ad476c3774bd03ddce075f3c1f90a3d6", size = 388608, upload-time = "2025-05-18T19:03:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/6d/64/67750672b4354ca20ca18d3d1ccf2c62a072e8a2d452ac3cf8ced73571ef/jiter-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03997d2f37f6b67d2f5c475da4412be584e1cec273c1cfc03d642c46db43f8cf", size = 352454, upload-time = "2025-05-18T19:03:14.741Z" }, + { url = "https://files.pythonhosted.org/packages/96/4d/5c4e36d48f169a54b53a305114be3efa2bbffd33b648cd1478a688f639c1/jiter-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c404a99352d839fed80d6afd6c1d66071f3bacaaa5c4268983fc10f769112e90", size = 391833, upload-time = "2025-05-18T19:03:16.426Z" }, + { url = "https://files.pythonhosted.org/packages/0b/de/ce4a6166a78810bd83763d2fa13f85f73cbd3743a325469a4a9289af6dae/jiter-0.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66e989410b6666d3ddb27a74c7e50d0829704ede652fd4c858e91f8d64b403d0", size = 523646, upload-time = "2025-05-18T19:03:17.704Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a6/3bc9acce53466972964cf4ad85efecb94f9244539ab6da1107f7aed82934/jiter-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b532d3af9ef4f6374609a3bcb5e05a1951d3bf6190dc6b176fdb277c9bbf15ee", size = 514735, upload-time = "2025-05-18T19:03:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/b4/d8/243c2ab8426a2a4dea85ba2a2ba43df379ccece2145320dfd4799b9633c5/jiter-0.10.0-cp310-cp310-win32.whl", hash = "sha256:da9be20b333970e28b72edc4dff63d4fec3398e05770fb3205f7fb460eb48dd4", size = 210747, upload-time = "2025-05-18T19:03:21.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/7a/8021bd615ef7788b98fc76ff533eaac846322c170e93cbffa01979197a45/jiter-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:f59e533afed0c5b0ac3eba20d2548c4a550336d8282ee69eb07b37ea526ee4e5", size = 207484, upload-time = "2025-05-18T19:03:23.046Z" }, + { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, + { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, + { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, + { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, + { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, + { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, + { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, + { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, + { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, + { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617, upload-time = "2025-05-18T19:04:02.078Z" }, + { url = "https://files.pythonhosted.org/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947, upload-time = "2025-05-18T19:04:03.347Z" }, + { url = "https://files.pythonhosted.org/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618, upload-time = "2025-05-18T19:04:04.709Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829, upload-time = "2025-05-18T19:04:06.912Z" }, + { url = "https://files.pythonhosted.org/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034, upload-time = "2025-05-18T19:04:08.222Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529, upload-time = "2025-05-18T19:04:09.566Z" }, + { url = "https://files.pythonhosted.org/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671, upload-time = "2025-05-18T19:04:10.98Z" }, + { url = "https://files.pythonhosted.org/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864, upload-time = "2025-05-18T19:04:12.722Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989, upload-time = "2025-05-18T19:04:14.261Z" }, + { url = "https://files.pythonhosted.org/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495, upload-time = "2025-05-18T19:04:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289, upload-time = "2025-05-18T19:04:17.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074, upload-time = "2025-05-18T19:04:19.21Z" }, + { url = "https://files.pythonhosted.org/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225, upload-time = "2025-05-18T19:04:20.583Z" }, + { url = "https://files.pythonhosted.org/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235, upload-time = "2025-05-18T19:04:22.363Z" }, + { url = "https://files.pythonhosted.org/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278, upload-time = "2025-05-18T19:04:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866, upload-time = "2025-05-18T19:04:24.891Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772, upload-time = "2025-05-18T19:04:26.161Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534, upload-time = "2025-05-18T19:04:27.495Z" }, + { url = "https://files.pythonhosted.org/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087, upload-time = "2025-05-18T19:04:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694, upload-time = "2025-05-18T19:04:30.183Z" }, + { url = "https://files.pythonhosted.org/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992, upload-time = "2025-05-18T19:04:32.028Z" }, + { url = "https://files.pythonhosted.org/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723, upload-time = "2025-05-18T19:04:33.467Z" }, + { url = "https://files.pythonhosted.org/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215, upload-time = "2025-05-18T19:04:34.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762, upload-time = "2025-05-18T19:04:36.19Z" }, + { url = "https://files.pythonhosted.org/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427, upload-time = "2025-05-18T19:04:37.544Z" }, + { url = "https://files.pythonhosted.org/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127, upload-time = "2025-05-18T19:04:38.837Z" }, + { url = "https://files.pythonhosted.org/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527, upload-time = "2025-05-18T19:04:40.612Z" }, + { url = "https://files.pythonhosted.org/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213, upload-time = "2025-05-18T19:04:41.894Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/aced428e2bd3c6c1132f67c5a708f9e7fd161d0ca8f8c5862b17b93cdf0a/jiter-0.10.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bd6292a43c0fc09ce7c154ec0fa646a536b877d1e8f2f96c19707f65355b5a4d", size = 317665, upload-time = "2025-05-18T19:04:43.417Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/2e/47d42f15d53ed382aef8212a737101ae2720e3697a954f9b95af06d34e89/jiter-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39de429dcaeb6808d75ffe9effefe96a4903c6a4b376b2f6d08d77c1aaee2f18", size = 312152, upload-time = "2025-05-18T19:04:44.797Z" }, + { url = "https://files.pythonhosted.org/packages/7b/02/aae834228ef4834fc18718724017995ace8da5f70aa1ec225b9bc2b2d7aa/jiter-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ce124f13a7a616fad3bb723f2bfb537d78239d1f7f219566dc52b6f2a9e48d", size = 346708, upload-time = "2025-05-18T19:04:46.127Z" }, + { url = "https://files.pythonhosted.org/packages/35/d4/6ff39dee2d0a9abd69d8a3832ce48a3aa644eed75e8515b5ff86c526ca9a/jiter-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:166f3606f11920f9a1746b2eea84fa2c0a5d50fd313c38bdea4edc072000b0af", size = 371360, upload-time = "2025-05-18T19:04:47.448Z" }, + { url = "https://files.pythonhosted.org/packages/a9/67/c749d962b4eb62445867ae4e64a543cbb5d63cc7d78ada274ac515500a7f/jiter-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28dcecbb4ba402916034fc14eba7709f250c4d24b0c43fc94d187ee0580af181", size = 492105, upload-time = "2025-05-18T19:04:48.792Z" }, + { url = "https://files.pythonhosted.org/packages/f6/d3/8fe1b1bae5161f27b1891c256668f598fa4c30c0a7dacd668046a6215fca/jiter-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86c5aa6910f9bebcc7bc4f8bc461aff68504388b43bfe5e5c0bd21efa33b52f4", size = 389577, upload-time = "2025-05-18T19:04:50.13Z" }, + { url = "https://files.pythonhosted.org/packages/ef/28/ecb19d789b4777898a4252bfaac35e3f8caf16c93becd58dcbaac0dc24ad/jiter-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceeb52d242b315d7f1f74b441b6a167f78cea801ad7c11c36da77ff2d42e8a28", size = 353849, upload-time = "2025-05-18T19:04:51.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/69/261f798f84790da6482ebd8c87ec976192b8c846e79444d0a2e0d33ebed8/jiter-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ff76d8887c8c8ee1e772274fcf8cc1071c2c58590d13e33bd12d02dc9a560397", size = 392029, upload-time = "2025-05-18T19:04:52.792Z" }, + { url = "https://files.pythonhosted.org/packages/cb/08/b8d15140d4d91f16faa2f5d416c1a71ab1bbe2b66c57197b692d04c0335f/jiter-0.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a9be4d0fa2b79f7222a88aa488bd89e2ae0a0a5b189462a12def6ece2faa45f1", size = 524386, upload-time = "2025-05-18T19:04:54.203Z" }, + { url = "https://files.pythonhosted.org/packages/9b/1d/23c41765cc95c0e23ac492a88450d34bf0fd87a37218d1b97000bffe0f53/jiter-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ab7fd8738094139b6c1ab1822d6f2000ebe41515c537235fd45dabe13ec9324", size = 515234, upload-time = "2025-05-18T19:04:55.838Z" }, + { url = "https://files.pythonhosted.org/packages/9f/14/381d8b151132e79790579819c3775be32820569f23806769658535fe467f/jiter-0.10.0-cp39-cp39-win32.whl", hash = "sha256:5f51e048540dd27f204ff4a87f5d79294ea0aa3aa552aca34934588cf27023cf", size = 211436, upload-time = "2025-05-18T19:04:57.183Z" }, + { url = "https://files.pythonhosted.org/packages/59/66/f23ae51dea8ee8ce429027b60008ca895d0fa0704f0c7fe5f09014a6cffb/jiter-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b28302349dc65703a9e4ead16f163b1c339efffbe1049c30a44b001a2a4fff9", size = 208777, upload-time = "2025-05-18T19:04:58.454Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/fe/0f5a938c54105553436dbff7a61dc4fed4b1b2c98852f8833beaf4d5968f/joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444", size = 330475, upload-time = "2025-05-23T12:04:37.097Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7d/4f/1195bbac8e0c2acc5f740661631d8d750dc38d4a32b23ee5df3cde6f4e0d/joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a", size = 307746, upload-time = "2025-05-23T12:04:35.124Z" }, +] + +[[package]] +name = "json-repair" +version = "0.44.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/6b/ed6e92efc5acfbc9c35ccae1676b70e4adb1552421e64f838c2a3f097d9a/json_repair-0.44.1.tar.gz", hash = "sha256:1130eb9733b868dac1340b43cb2effebb519ae6d52dd2d0728c6cca517f1e0b4", size = 32886, upload-time = "2025-04-30T16:09:38.54Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/b4/3cbd27a3240b2962c3b87bbb1c20eb6c56e5b26cde61f141f86ca98e2f68/json_repair-0.44.1-py3-none-any.whl", hash = "sha256:51d82532c3b8263782a301eb7904c75dce5fee8c0d1aba490287fc0ab779ac50", size = 22478, upload-time = "2025-04-30T16:09:37.303Z" }, +] + +[[package]] +name = "json-repair" +version = "0.49.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/db/5e6671347db8a55a52aec330017b7d2f0c4d49ac4b374018a912619dd2ee/json_repair-0.49.0.tar.gz", hash = "sha256:6a57563384da509c231a27bd87503eeaf5964f38d11a2b5ac808fe91431e1e61", size = 35108, upload-time = "2025-08-10T08:39:14.892Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/2a/d307b12ece7e3c82bade236b02621219310bb87ebd06ff9b6e3185bac7b8/json_repair-0.49.0-py3-none-any.whl", hash = "sha256:84b39814689d6b48c403f1fe6abdae976b64ffe2dc0ba5ad61a199bd23354391", size = 26549, upload-time = "2025-08-10T08:39:13.627Z" }, +] + [[package]] name = "json5" version = "0.12.1" @@ -1174,6 +1943,33 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size = 36119, upload-time = "2025-08-12T19:47:41.131Z" }, ] +[[package]] +name = "jsonschema" +version = "4.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/00/a297a868e9d0784450faa7365c2172a7d6110c763e30ba861867c32ae6a9/jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f", size = 356830, upload-time = "2025-07-18T15:39:45.11Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184, upload-time = "2025-07-18T15:39:42.956Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + [[package]] name = "jupyter-client" version = "8.6.3" @@ -1421,6 
+2217,149 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, ] +[[package]] +name = "litellm" +version = "1.75.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "httpx" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "tiktoken" }, + { name = "tokenizers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/4e/48e3d6de19afe713223e3bc7009a2003501420de2a5d823c569cefbd9731/litellm-1.75.8.tar.gz", hash = "sha256:92061bd263ff8c33c8fff70ba92cd046adb7ea041a605826a915d108742fe59e", size = 10140384, upload-time = "2025-08-16T21:42:24.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/82/c4d00fbeafd93c00dab6ea03f33cadd6a97adeb720ba1d89fc319e5cb10b/litellm-1.75.8-py3-none-any.whl", hash = "sha256:0bf004488df8506381ec6e35e1486e2870e8d578a7c3f2427cd497558ce07a2e", size = 8916305, upload-time = "2025-08-16T21:42:21.387Z" }, +] + +[[package]] +name = "magicattr" +version = "0.1.6" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/7e/76b7e0c391bee7e9273725c29c8fe41c4df62a215ce58aa8e3518baee0bb/magicattr-0.1.6-py2.py3-none-any.whl", hash = "sha256:d96b18ee45b5ee83b09c17e15d3459a64de62d538808c2f71182777dd9dbbbdf", size = 4664, upload-time = "2022-01-25T16:56:47.074Z" }, +] 
+ +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + [[package]] name = "matplotlib" version = "3.9.4" @@ -1576,6 +2515,155 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = 
"multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/6b/86f353088c1358e76fd30b0146947fddecee812703b604ee901e85cd2a80/multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f", size = 77054, upload-time = "2025-08-11T12:06:02.99Z" }, + { url = "https://files.pythonhosted.org/packages/19/5d/c01dc3d3788bb877bd7f5753ea6eb23c1beeca8044902a8f5bfb54430f63/multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb", size = 44914, upload-time = "2025-08-11T12:06:05.264Z" }, + { url = "https://files.pythonhosted.org/packages/46/44/964dae19ea42f7d3e166474d8205f14bb811020e28bc423d46123ddda763/multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495", size = 44601, upload-time = "2025-08-11T12:06:06.627Z" }, + { url = "https://files.pythonhosted.org/packages/31/20/0616348a1dfb36cb2ab33fc9521de1f27235a397bf3f59338e583afadd17/multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8", size = 224821, upload-time = "2025-08-11T12:06:08.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/26/5d8923c69c110ff51861af05bd27ca6783011b96725d59ccae6d9daeb627/multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7", size = 242608, upload-time = "2025-08-11T12:06:09.697Z" }, + { url = "https://files.pythonhosted.org/packages/5c/cc/e2ad3ba9459aa34fa65cf1f82a5c4a820a2ce615aacfb5143b8817f76504/multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796", size = 222324, upload-time = "2025-08-11T12:06:10.905Z" }, + { url = "https://files.pythonhosted.org/packages/19/db/4ed0f65701afbc2cb0c140d2d02928bb0fe38dd044af76e58ad7c54fd21f/multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db", size = 253234, upload-time = "2025-08-11T12:06:12.658Z" }, + { url = "https://files.pythonhosted.org/packages/94/c1/5160c9813269e39ae14b73debb907bfaaa1beee1762da8c4fb95df4764ed/multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0", size = 251613, upload-time = "2025-08-11T12:06:13.97Z" }, + { url = "https://files.pythonhosted.org/packages/05/a9/48d1bd111fc2f8fb98b2ed7f9a115c55a9355358432a19f53c0b74d8425d/multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877", size = 241649, upload-time = "2025-08-11T12:06:15.204Z" }, + { url = "https://files.pythonhosted.org/packages/85/2a/f7d743df0019408768af8a70d2037546a2be7b81fbb65f040d76caafd4c5/multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace", size = 239238, upload-time = "2025-08-11T12:06:16.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b8/4f4bb13323c2d647323f7919201493cf48ebe7ded971717bfb0f1a79b6bf/multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6", size = 233517, upload-time = "2025-08-11T12:06:18.107Z" }, + { url = "https://files.pythonhosted.org/packages/33/29/4293c26029ebfbba4f574febd2ed01b6f619cfa0d2e344217d53eef34192/multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb", size = 243122, upload-time = "2025-08-11T12:06:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/20/60/a1c53628168aa22447bfde3a8730096ac28086704a0d8c590f3b63388d0c/multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb", size = 248992, upload-time = "2025-08-11T12:06:20.661Z" }, + { url = "https://files.pythonhosted.org/packages/a3/3b/55443a0c372f33cae5d9ec37a6a973802884fa0ab3586659b197cf8cc5e9/multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987", size = 243708, upload-time = "2025-08-11T12:06:21.891Z" }, + { url = "https://files.pythonhosted.org/packages/7c/60/a18c6900086769312560b2626b18e8cca22d9e85b1186ba77f4755b11266/multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f", size = 237498, upload-time = "2025-08-11T12:06:23.206Z" }, + { url = "https://files.pythonhosted.org/packages/11/3d/8bdd8bcaff2951ce2affccca107a404925a2beafedd5aef0b5e4a71120a6/multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f", size = 41415, upload-time = "2025-08-11T12:06:24.77Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c0/53/cab1ad80356a4cd1b685a254b680167059b433b573e53872fab245e9fc95/multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0", size = 46046, upload-time = "2025-08-11T12:06:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/cf/9a/874212b6f5c1c2d870d0a7adc5bb4cfe9b0624fa15cdf5cf757c0f5087ae/multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729", size = 43147, upload-time = "2025-08-11T12:06:27.534Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, + { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, + { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, + { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, + { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, + { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, 
+ { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, + { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, + { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, + { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, + { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, + { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, + { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, + { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, + { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, + { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, 
+ { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, + { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = 
"2025-08-11T12:08:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d3/f04c5db316caee9b5b2cbba66270b358c922a959855995bedde87134287c/multidict-6.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:af7618b591bae552b40dbb6f93f5518328a949dac626ee75927bba1ecdeea9f4", size = 76977, upload-time = "2025-08-11T12:08:16.667Z" }, + { url = "https://files.pythonhosted.org/packages/70/39/a6200417d883e510728ab3caec02d3b66ff09e1c85e0aab2ba311abfdf06/multidict-6.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b6819f83aef06f560cb15482d619d0e623ce9bf155115150a85ab11b8342a665", size = 44878, upload-time = "2025-08-11T12:08:18.157Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/815be31ed35571b137d65232816f61513fcd97b2717d6a9d7800b5a0c6e0/multidict-6.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d09384e75788861e046330308e7af54dd306aaf20eb760eb1d0de26b2bea2cb", size = 44546, upload-time = "2025-08-11T12:08:19.694Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f1/21b5bff6a8c3e2aff56956c241941ace6b8820e1abe6b12d3c52868a773d/multidict-6.6.4-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:a59c63061f1a07b861c004e53869eb1211ffd1a4acbca330e3322efa6dd02978", size = 223020, upload-time = "2025-08-11T12:08:21.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/59/37083f1dd3439979a0ffeb1906818d978d88b4cc7f4600a9f89b1cb6713c/multidict-6.6.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350f6b0fe1ced61e778037fdc7613f4051c8baf64b1ee19371b42a3acdb016a0", size = 240528, upload-time = "2025-08-11T12:08:23.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/f0/f054d123c87784307a27324c829eb55bcfd2e261eb785fcabbd832c8dc4a/multidict-6.6.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c5cbac6b55ad69cb6aa17ee9343dfbba903118fd530348c330211dc7aa756d1", size = 219540, upload-time = "2025-08-11T12:08:24.965Z" }, + { url = "https://files.pythonhosted.org/packages/e8/26/8f78ce17b7118149c17f238f28fba2a850b660b860f9b024a34d0191030f/multidict-6.6.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:630f70c32b8066ddfd920350bc236225814ad94dfa493fe1910ee17fe4365cbb", size = 251182, upload-time = "2025-08-11T12:08:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/00/c3/a21466322d69f6594fe22d9379200f99194d21c12a5bbf8c2a39a46b83b6/multidict-6.6.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8d4916a81697faec6cb724a273bd5457e4c6c43d82b29f9dc02c5542fd21fc9", size = 249371, upload-time = "2025-08-11T12:08:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8e/2e673124eb05cf8dc82e9265eccde01a36bcbd3193e27799b8377123c976/multidict-6.6.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e42332cf8276bb7645d310cdecca93a16920256a5b01bebf747365f86a1675b", size = 239235, upload-time = "2025-08-11T12:08:29.937Z" }, + { url = "https://files.pythonhosted.org/packages/2b/2d/bdd9f05e7c89e30a4b0e4faf0681a30748f8d1310f68cfdc0e3571e75bd5/multidict-6.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f3be27440f7644ab9a13a6fc86f09cdd90b347c3c5e30c6d6d860de822d7cb53", size = 237410, upload-time = "2025-08-11T12:08:31.872Z" }, + { url = "https://files.pythonhosted.org/packages/46/4c/3237b83f8ca9a2673bb08fc340c15da005a80f5cc49748b587c8ae83823b/multidict-6.6.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:21f216669109e02ef3e2415ede07f4f8987f00de8cdfa0cc0b3440d42534f9f0", size = 232979, upload-time = "2025-08-11T12:08:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/55/a6/a765decff625ae9bc581aed303cd1837955177dafc558859a69f56f56ba8/multidict-6.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d9890d68c45d1aeac5178ded1d1cccf3bc8d7accf1f976f79bf63099fb16e4bd", size = 240979, upload-time = "2025-08-11T12:08:35.02Z" }, + { url = "https://files.pythonhosted.org/packages/6b/2d/9c75975cb0c66ea33cae1443bb265b2b3cd689bffcbc68872565f401da23/multidict-6.6.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:edfdcae97cdc5d1a89477c436b61f472c4d40971774ac4729c613b4b133163cb", size = 246849, upload-time = "2025-08-11T12:08:37.038Z" }, + { url = "https://files.pythonhosted.org/packages/3e/71/d21ac0843c1d8751fb5dcf8a1f436625d39d4577bc27829799d09b419af7/multidict-6.6.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0b2e886624be5773e69cf32bcb8534aecdeb38943520b240fed3d5596a430f2f", size = 241798, upload-time = "2025-08-11T12:08:38.669Z" }, + { url = "https://files.pythonhosted.org/packages/94/3d/1d8911e53092837bd11b1c99d71de3e2a9a26f8911f864554677663242aa/multidict-6.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:be5bf4b3224948032a845d12ab0f69f208293742df96dc14c4ff9b09e508fc17", size = 235315, upload-time = "2025-08-11T12:08:40.266Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/4b758df96376f73e936b1942c6c2dfc17e37ed9d5ff3b01a811496966ca0/multidict-6.6.4-cp39-cp39-win32.whl", hash = "sha256:10a68a9191f284fe9d501fef4efe93226e74df92ce7a24e301371293bd4918ae", size = 41434, upload-time = "2025-08-11T12:08:41.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/16/f1dfa2a0f25f2717a5e9e5fe8fd30613f7fe95e3530cec8d11f5de0b709c/multidict-6.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee25f82f53262f9ac93bd7e58e47ea1bdcc3393cef815847e397cba17e284210", size = 46186, upload-time = "2025-08-11T12:08:43.367Z" }, + { url = "https://files.pythonhosted.org/packages/88/7d/a0568bac65438c494cb6950b29f394d875a796a237536ac724879cf710c9/multidict-6.6.4-cp39-cp39-win_arm64.whl", hash = "sha256:f9867e55590e0855bcec60d4f9a092b69476db64573c9fe17e92b0c50614c16a", size = 43115, upload-time = "2025-08-11T12:08:45.126Z" }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, +] + +[[package]] +name = "multiprocess" +version = "0.70.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603, upload-time = "2024-01-28T18:52:34.85Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee", size = 134980, upload-time = "2024-01-28T18:52:15.731Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec", size = 134982, 
upload-time = "2024-01-28T18:52:17.783Z" }, + { url = "https://files.pythonhosted.org/packages/d8/94/8638a89f93c80df329116e6781a060506c7e91e1f4370dc831e9d17a041d/multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41", size = 133497, upload-time = "2024-01-28T18:52:22.644Z" }, + { url = "https://files.pythonhosted.org/packages/89/21/222066f6bb8d8af287923ae3bd26cf4699a9ce020228ac273caca1de8250/multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a", size = 133498, upload-time = "2024-01-28T18:52:24.576Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824, upload-time = "2024-01-28T18:52:26.062Z" }, + { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519, upload-time = "2024-01-28T18:52:28.115Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741, upload-time = "2024-01-28T18:52:29.395Z" }, + { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628, upload-time = "2024-01-28T18:52:30.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351, upload-time = "2024-01-28T18:52:31.981Z" }, +] + [[package]] name = "mypy" version = "1.17.1" @@ -1859,6 +2947,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/e3/6690b3f85a05506733c7e90b577e4762517404ea78bab2ca3a5cb1aeb78d/numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619", size = 12977811, upload-time = "2025-07-24T21:29:18.234Z" }, ] +[[package]] +name = "openai" +version = "1.99.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/d2/ef89c6f3f36b13b06e271d3cc984ddd2f62508a0972c1cbcc8485a6644ff/openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92", size = 506992, upload-time = "2025-08-12T02:31:10.054Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/fb/df274ca10698ee77b07bff952f302ea627cc12dac6b85289485dd77db6de/openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a", size = 786816, upload-time = "2025-08-12T02:31:08.34Z" }, +] + +[[package]] +name = "optuna" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alembic" }, + { name = "colorlog" }, + { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version == '3.10.*'" }, + { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "sqlalchemy" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/e0/b303190ae8032d12f320a24c42af04038bacb1f3b17ede354dd1044a5642/optuna-4.4.0.tar.gz", hash = "sha256:a9029f6a92a1d6c8494a94e45abd8057823b535c2570819072dbcdc06f1c1da4", size = 467708, upload-time = "2025-06-16T05:13:00.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/5e/068798a8c7087863e7772e9363a880ab13fe55a5a7ede8ec42fab8a1acbb/optuna-4.4.0-py3-none-any.whl", hash = "sha256:fad8d9c5d5af993ae1280d6ce140aecc031c514a44c3b639d8c8658a8b7920ea", size = 395949, upload-time = "2025-06-16T05:12:58.37Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -2149,6 +3276,111 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, ] +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, + { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, + { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, + { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, + { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = 
"2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = 
"2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/6c/39/8ea9bcfaaff16fd0b0fc901ee522e24c9ec44b4ca0229cfffb8066a06959/propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5", size = 74678, upload-time = "2025-06-09T22:55:41.227Z" }, + { url = "https://files.pythonhosted.org/packages/d3/85/cab84c86966e1d354cf90cdc4ba52f32f99a5bca92a1529d666d957d7686/propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4", size = 43829, upload-time = "2025-06-09T22:55:42.417Z" }, + { url = "https://files.pythonhosted.org/packages/23/f7/9cb719749152d8b26d63801b3220ce2d3931312b2744d2b3a088b0ee9947/propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2", size = 43729, upload-time = "2025-06-09T22:55:43.651Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a2/0b2b5a210ff311260002a315f6f9531b65a36064dfb804655432b2f7d3e3/propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d", size = 204483, upload-time = "2025-06-09T22:55:45.327Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e0/7aff5de0c535f783b0c8be5bdb750c305c1961d69fbb136939926e155d98/propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec", size = 217425, upload-time = "2025-06-09T22:55:46.729Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/1d/65fa889eb3b2a7d6e4ed3c2b568a9cb8817547a1450b572de7bf24872800/propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701", size = 214723, upload-time = "2025-06-09T22:55:48.342Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e2/eecf6989870988dfd731de408a6fa366e853d361a06c2133b5878ce821ad/propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef", size = 200166, upload-time = "2025-06-09T22:55:49.775Z" }, + { url = "https://files.pythonhosted.org/packages/12/06/c32be4950967f18f77489268488c7cdc78cbfc65a8ba8101b15e526b83dc/propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1", size = 194004, upload-time = "2025-06-09T22:55:51.335Z" }, + { url = "https://files.pythonhosted.org/packages/46/6c/17b521a6b3b7cbe277a4064ff0aa9129dd8c89f425a5a9b6b4dd51cc3ff4/propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886", size = 203075, upload-time = "2025-06-09T22:55:52.681Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/3bdba2b736b3e45bc0e40f4370f745b3e711d439ffbffe3ae416393eece9/propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b", size = 195407, upload-time = "2025-06-09T22:55:54.048Z" }, + { url = "https://files.pythonhosted.org/packages/29/bd/760c5c6a60a4a2c55a421bc34a25ba3919d49dee411ddb9d1493bb51d46e/propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb", size = 196045, upload-time = "2025-06-09T22:55:55.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/58/ced2757a46f55b8c84358d6ab8de4faf57cba831c51e823654da7144b13a/propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea", size = 208432, upload-time = "2025-06-09T22:55:56.884Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ec/d98ea8d5a4d8fe0e372033f5254eddf3254344c0c5dc6c49ab84349e4733/propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb", size = 210100, upload-time = "2025-06-09T22:55:58.498Z" }, + { url = "https://files.pythonhosted.org/packages/56/84/b6d8a7ecf3f62d7dd09d9d10bbf89fad6837970ef868b35b5ffa0d24d9de/propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe", size = 200712, upload-time = "2025-06-09T22:55:59.906Z" }, + { url = "https://files.pythonhosted.org/packages/bf/32/889f4903ddfe4a9dc61da71ee58b763758cf2d608fe1decede06e6467f8d/propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1", size = 38187, upload-time = "2025-06-09T22:56:01.212Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/d666795fb9ba1dc139d30de64f3b6fd1ff9c9d3d96ccfdb992cd715ce5d2/propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9", size = 42025, upload-time = "2025-06-09T22:56:02.875Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + [[package]] name = "psutil" version = "7.0.0" @@ -2241,6 +3473,130 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, ] +[[package]] +name = "pydantic" +version = "2.11.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = 
"2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { 
url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url 
= "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, 
upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", size = 2028677, upload-time = "2025-04-23T18:32:27.227Z" }, + { url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", size = 1864735, upload-time = "2025-04-23T18:32:29.019Z" }, + { url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467, upload-time = "2025-04-23T18:32:31.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041, upload-time = "2025-04-23T18:32:33.655Z" }, + { url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503, upload-time = "2025-04-23T18:32:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079, upload-time = "2025-04-23T18:32:37.659Z" }, + { url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508, upload-time = "2025-04-23T18:32:39.637Z" }, + { url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693, upload-time = "2025-04-23T18:32:41.818Z" }, + { url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224, upload-time = "2025-04-23T18:32:44.033Z" 
}, + { url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403, upload-time = "2025-04-23T18:32:45.836Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331, upload-time = "2025-04-23T18:32:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", size = 1910571, upload-time = "2025-04-23T18:32:49.401Z" }, + { url = "https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", size = 1956504, upload-time = "2025-04-23T18:32:51.287Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = 
"2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, 
upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", size = 2024034, upload-time = "2025-04-23T18:33:32.843Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", size = 1858578, upload-time = "2025-04-23T18:33:34.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858, upload-time = "2025-04-23T18:33:36.933Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498, upload-time = "2025-04-23T18:33:38.997Z" }, + { url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428, upload-time = "2025-04-23T18:33:41.18Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854, upload-time = "2025-04-23T18:33:43.446Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859, upload-time = "2025-04-23T18:33:45.56Z" }, + { url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059, upload-time = "2025-04-23T18:33:47.735Z" }, 
+ { url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661, upload-time = "2025-04-23T18:33:49.995Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -2337,6 +3693,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + [[package]] name = "pytz" version = "2025.2" @@ -2512,6 +3877,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/37/c1f26d13e9d4c3bfce42fead8ff640f6c06a58decde49a6b295b9d52cefd/pyzmq-27.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:70b719a130b81dd130a57ac0ff636dc2c0127c5b35ca5467d1b67057e3c7a4d2", size = 544561, upload-time = "2025-08-03T05:05:38.608Z" }, ] +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = 
"typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + [[package]] name = "regex" version = "2025.7.34" @@ -2633,6 +4012,182 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/60/50fbb6ffb35f733654466f1a90d162bcbea358adc3b0871339254fbc37b2/requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14", size = 14782, upload-time = "2025-05-21T13:42:04.007Z" }, ] +[[package]] +name = "rich" +version = "14.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = 
"sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/d9/991a0dee12d9fc53ed027e26a26a64b151d77252ac477e22666b9688bc16/rpds_py-0.27.0.tar.gz", hash = "sha256:8b23cf252f180cda89220b378d917180f29d313cd6a07b2431c0d3b776aae86f", size = 27420, upload-time = "2025-08-07T08:26:39.624Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/2d/ad2e37dee3f45580f7fa0066c412a521f9bee53d2718b0e9436d308a1ecd/rpds_py-0.27.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:130c1ffa5039a333f5926b09e346ab335f0d4ec393b030a18549a7c7e7c2cea4", size = 371511, upload-time = "2025-08-07T08:23:06.205Z" }, + { url = "https://files.pythonhosted.org/packages/f5/67/57b4b2479193fde9dd6983a13c2550b5f9c3bcdf8912dffac2068945eb14/rpds_py-0.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a4cf32a26fa744101b67bfd28c55d992cd19438aff611a46cac7f066afca8fd4", size = 354718, upload-time = "2025-08-07T08:23:08.222Z" }, + { url = "https://files.pythonhosted.org/packages/a3/be/c2b95ec4b813eb11f3a3c3d22f22bda8d3a48a074a0519cde968c4d102cf/rpds_py-0.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64a0fe3f334a40b989812de70160de6b0ec7e3c9e4a04c0bbc48d97c5d3600ae", size = 381518, upload-time = "2025-08-07T08:23:09.696Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d2/5a7279bc2b93b20bd50865a2269016238cee45f7dc3cc33402a7f41bd447/rpds_py-0.27.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a0ff7ee28583ab30a52f371b40f54e7138c52ca67f8ca17ccb7ccf0b383cb5f", size = 396694, upload-time = "2025-08-07T08:23:11.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/e9/bac8b3714bd853c5bcb466e04acfb9a5da030d77e0ddf1dfad9afb791c31/rpds_py-0.27.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15ea4d2e182345dd1b4286593601d766411b43f868924afe297570658c31a62b", size = 514813, upload-time = "2025-08-07T08:23:12.215Z" }, + { url = "https://files.pythonhosted.org/packages/1d/aa/293115e956d7d13b7d2a9e9a4121f74989a427aa125f00ce4426ca8b7b28/rpds_py-0.27.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36184b44bf60a480863e51021c26aca3dfe8dd2f5eeabb33622b132b9d8b8b54", size = 402246, upload-time = "2025-08-07T08:23:13.699Z" }, + { url = "https://files.pythonhosted.org/packages/88/59/2d6789bb898fb3e2f0f7b82b7bcf27f579ebcb6cc36c24f4e208f7f58a5b/rpds_py-0.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b78430703cfcf5f5e86eb74027a1ed03a93509273d7c705babb547f03e60016", size = 383661, upload-time = "2025-08-07T08:23:15.231Z" }, + { url = "https://files.pythonhosted.org/packages/0c/55/add13a593a7a81243a9eed56d618d3d427be5dc1214931676e3f695dfdc1/rpds_py-0.27.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:dbd749cff1defbde270ca346b69b3baf5f1297213ef322254bf2a28537f0b046", size = 401691, upload-time = "2025-08-07T08:23:16.681Z" }, + { url = "https://files.pythonhosted.org/packages/04/09/3e8b2aad494ffaca571e4e19611a12cc18fcfd756d9274f3871a2d822445/rpds_py-0.27.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bde37765564cd22a676dd8101b657839a1854cfaa9c382c5abf6ff7accfd4ae", size = 416529, upload-time = "2025-08-07T08:23:17.863Z" }, + { url = "https://files.pythonhosted.org/packages/a4/6d/bd899234728f1d8f72c9610f50fdf1c140ecd0a141320e1f1d0f6b20595d/rpds_py-0.27.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1d66f45b9399036e890fb9c04e9f70c33857fd8f58ac8db9f3278cfa835440c3", size = 558673, upload-time = "2025-08-07T08:23:18.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/f4/f3e02def5193fb899d797c232f90d6f8f0f2b9eca2faef6f0d34cbc89b2e/rpds_py-0.27.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d85d784c619370d9329bbd670f41ff5f2ae62ea4519761b679d0f57f0f0ee267", size = 588426, upload-time = "2025-08-07T08:23:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0c/88e716cd8fd760e5308835fe298255830de4a1c905fd51760b9bb40aa965/rpds_py-0.27.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5df559e9e7644d9042f626f2c3997b555f347d7a855a15f170b253f6c5bfe358", size = 554552, upload-time = "2025-08-07T08:23:21.714Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a9/0a8243c182e7ac59b901083dff7e671feba6676a131bfff3f8d301cd2b36/rpds_py-0.27.0-cp310-cp310-win32.whl", hash = "sha256:b8a4131698b6992b2a56015f51646711ec5d893a0b314a4b985477868e240c87", size = 218081, upload-time = "2025-08-07T08:23:23.273Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e7/202ff35852312760148be9e08fe2ba6900aa28e7a46940a313eae473c10c/rpds_py-0.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:cbc619e84a5e3ab2d452de831c88bdcad824414e9c2d28cd101f94dbdf26329c", size = 230077, upload-time = "2025-08-07T08:23:24.308Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/49d515434c1752e40f5e35b985260cf27af052593378580a2f139a5be6b8/rpds_py-0.27.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:dbc2ab5d10544eb485baa76c63c501303b716a5c405ff2469a1d8ceffaabf622", size = 371577, upload-time = "2025-08-07T08:23:25.379Z" }, + { url = "https://files.pythonhosted.org/packages/e1/6d/bf2715b2fee5087fa13b752b5fd573f1a93e4134c74d275f709e38e54fe7/rpds_py-0.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ec85994f96a58cf7ed288caa344b7fe31fd1d503bdf13d7331ead5f70ab60d5", size = 354959, upload-time = "2025-08-07T08:23:26.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/5c/e7762808c746dd19733a81373c10da43926f6a6adcf4920a21119697a60a/rpds_py-0.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:190d7285cd3bb6d31d37a0534d7359c1ee191eb194c511c301f32a4afa5a1dd4", size = 381485, upload-time = "2025-08-07T08:23:27.869Z" }, + { url = "https://files.pythonhosted.org/packages/40/51/0d308eb0b558309ca0598bcba4243f52c4cd20e15fe991b5bd75824f2e61/rpds_py-0.27.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10d92fb6d7fd827e44055fcd932ad93dac6a11e832d51534d77b97d1d85400f", size = 396816, upload-time = "2025-08-07T08:23:29.424Z" }, + { url = "https://files.pythonhosted.org/packages/5c/aa/2d585ec911d78f66458b2c91252134ca0c7c70f687a72c87283173dc0c96/rpds_py-0.27.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd2c1d27ebfe6a015cfa2005b7fe8c52d5019f7bbdd801bc6f7499aab9ae739e", size = 514950, upload-time = "2025-08-07T08:23:30.576Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ef/aced551cc1148179557aed84343073adadf252c91265263ee6203458a186/rpds_py-0.27.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4790c9d5dd565ddb3e9f656092f57268951398cef52e364c405ed3112dc7c7c1", size = 402132, upload-time = "2025-08-07T08:23:32.428Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ac/cf644803d8d417653fe2b3604186861d62ea6afaef1b2284045741baef17/rpds_py-0.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4300e15e7d03660f04be84a125d1bdd0e6b2f674bc0723bc0fd0122f1a4585dc", size = 383660, upload-time = "2025-08-07T08:23:33.829Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ec/caf47c55ce02b76cbaeeb2d3b36a73da9ca2e14324e3d75cf72b59dcdac5/rpds_py-0.27.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:59195dc244fc183209cf8a93406889cadde47dfd2f0a6b137783aa9c56d67c85", size = 401730, upload-time = "2025-08-07T08:23:34.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/71/c1f355afdcd5b99ffc253422aa4bdcb04ccf1491dcd1bda3688a0c07fd61/rpds_py-0.27.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fae4a01ef8c4cb2bbe92ef2063149596907dc4a881a8d26743b3f6b304713171", size = 416122, upload-time = "2025-08-07T08:23:36.062Z" }, + { url = "https://files.pythonhosted.org/packages/38/0f/f4b5b1eda724ed0e04d2b26d8911cdc131451a7ee4c4c020a1387e5c6ded/rpds_py-0.27.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e3dc8d4ede2dbae6c0fc2b6c958bf51ce9fd7e9b40c0f5b8835c3fde44f5807d", size = 558771, upload-time = "2025-08-07T08:23:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/93/c0/5f8b834db2289ab48d5cffbecbb75e35410103a77ac0b8da36bf9544ec1c/rpds_py-0.27.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c3782fb753aa825b4ccabc04292e07897e2fd941448eabf666856c5530277626", size = 587876, upload-time = "2025-08-07T08:23:38.662Z" }, + { url = "https://files.pythonhosted.org/packages/d2/dd/1a1df02ab8eb970115cff2ae31a6f73916609b900dc86961dc382b8c2e5e/rpds_py-0.27.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:887ab1f12b0d227e9260558a4a2320024b20102207ada65c43e1ffc4546df72e", size = 554359, upload-time = "2025-08-07T08:23:39.897Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e4/95a014ab0d51ab6e3bebbdb476a42d992d2bbf9c489d24cff9fda998e925/rpds_py-0.27.0-cp311-cp311-win32.whl", hash = "sha256:5d6790ff400254137b81b8053b34417e2c46921e302d655181d55ea46df58cf7", size = 218084, upload-time = "2025-08-07T08:23:41.086Z" }, + { url = "https://files.pythonhosted.org/packages/49/78/f8d5b71ec65a0376b0de31efcbb5528ce17a9b7fdd19c3763303ccfdedec/rpds_py-0.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:e24d8031a2c62f34853756d9208eeafa6b940a1efcbfe36e8f57d99d52bb7261", size = 230085, upload-time = "2025-08-07T08:23:42.143Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/d3/84429745184091e06b4cc70f8597408e314c2d2f7f5e13249af9ffab9e3d/rpds_py-0.27.0-cp311-cp311-win_arm64.whl", hash = "sha256:08680820d23df1df0a0260f714d12966bc6c42d02e8055a91d61e03f0c47dda0", size = 222112, upload-time = "2025-08-07T08:23:43.233Z" }, + { url = "https://files.pythonhosted.org/packages/cd/17/e67309ca1ac993fa1888a0d9b2f5ccc1f67196ace32e76c9f8e1dbbbd50c/rpds_py-0.27.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:19c990fdf5acecbf0623e906ae2e09ce1c58947197f9bced6bbd7482662231c4", size = 362611, upload-time = "2025-08-07T08:23:44.773Z" }, + { url = "https://files.pythonhosted.org/packages/93/2e/28c2fb84aa7aa5d75933d1862d0f7de6198ea22dfd9a0cca06e8a4e7509e/rpds_py-0.27.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c27a7054b5224710fcfb1a626ec3ff4f28bcb89b899148c72873b18210e446b", size = 347680, upload-time = "2025-08-07T08:23:46.014Z" }, + { url = "https://files.pythonhosted.org/packages/44/3e/9834b4c8f4f5fe936b479e623832468aa4bd6beb8d014fecaee9eac6cdb1/rpds_py-0.27.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09965b314091829b378b60607022048953e25f0b396c2b70e7c4c81bcecf932e", size = 384600, upload-time = "2025-08-07T08:23:48Z" }, + { url = "https://files.pythonhosted.org/packages/19/78/744123c7b38865a965cd9e6f691fde7ef989a00a256fa8bf15b75240d12f/rpds_py-0.27.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:14f028eb47f59e9169bfdf9f7ceafd29dd64902141840633683d0bad5b04ff34", size = 400697, upload-time = "2025-08-07T08:23:49.407Z" }, + { url = "https://files.pythonhosted.org/packages/32/97/3c3d32fe7daee0a1f1a678b6d4dfb8c4dcf88197fa2441f9da7cb54a8466/rpds_py-0.27.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6168af0be75bba990a39f9431cdfae5f0ad501f4af32ae62e8856307200517b8", size = 517781, upload-time = "2025-08-07T08:23:50.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/be/28f0e3e733680aa13ecec1212fc0f585928a206292f14f89c0b8a684cad1/rpds_py-0.27.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab47fe727c13c09d0e6f508e3a49e545008e23bf762a245b020391b621f5b726", size = 406449, upload-time = "2025-08-07T08:23:51.732Z" }, + { url = "https://files.pythonhosted.org/packages/95/ae/5d15c83e337c082d0367053baeb40bfba683f42459f6ebff63a2fd7e5518/rpds_py-0.27.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fa01b3d5e3b7d97efab65bd3d88f164e289ec323a8c033c5c38e53ee25c007e", size = 386150, upload-time = "2025-08-07T08:23:52.822Z" }, + { url = "https://files.pythonhosted.org/packages/bf/65/944e95f95d5931112829e040912b25a77b2e7ed913ea5fe5746aa5c1ce75/rpds_py-0.27.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:6c135708e987f46053e0a1246a206f53717f9fadfba27174a9769ad4befba5c3", size = 406100, upload-time = "2025-08-07T08:23:54.339Z" }, + { url = "https://files.pythonhosted.org/packages/21/a4/1664b83fae02894533cd11dc0b9f91d673797c2185b7be0f7496107ed6c5/rpds_py-0.27.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc327f4497b7087d06204235199daf208fd01c82d80465dc5efa4ec9df1c5b4e", size = 421345, upload-time = "2025-08-07T08:23:55.832Z" }, + { url = "https://files.pythonhosted.org/packages/7c/26/b7303941c2b0823bfb34c71378249f8beedce57301f400acb04bb345d025/rpds_py-0.27.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e57906e38583a2cba67046a09c2637e23297618dc1f3caddbc493f2be97c93f", size = 561891, upload-time = "2025-08-07T08:23:56.951Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c8/48623d64d4a5a028fa99576c768a6159db49ab907230edddc0b8468b998b/rpds_py-0.27.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f4f69d7a4300fbf91efb1fb4916421bd57804c01ab938ab50ac9c4aa2212f03", size = 591756, upload-time = "2025-08-07T08:23:58.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/51/18f62617e8e61cc66334c9fb44b1ad7baae3438662098efbc55fb3fda453/rpds_py-0.27.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4c4fbbcff474e1e5f38be1bf04511c03d492d42eec0babda5d03af3b5589374", size = 557088, upload-time = "2025-08-07T08:23:59.6Z" }, + { url = "https://files.pythonhosted.org/packages/bd/4c/e84c3a276e2496a93d245516be6b49e20499aa8ca1c94d59fada0d79addc/rpds_py-0.27.0-cp312-cp312-win32.whl", hash = "sha256:27bac29bbbf39601b2aab474daf99dbc8e7176ca3389237a23944b17f8913d97", size = 221926, upload-time = "2025-08-07T08:24:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/83/89/9d0fbcef64340db0605eb0a0044f258076f3ae0a3b108983b2c614d96212/rpds_py-0.27.0-cp312-cp312-win_amd64.whl", hash = "sha256:8a06aa1197ec0281eb1d7daf6073e199eb832fe591ffa329b88bae28f25f5fe5", size = 233235, upload-time = "2025-08-07T08:24:01.846Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b0/e177aa9f39cbab060f96de4a09df77d494f0279604dc2f509263e21b05f9/rpds_py-0.27.0-cp312-cp312-win_arm64.whl", hash = "sha256:e14aab02258cb776a108107bd15f5b5e4a1bbaa61ef33b36693dfab6f89d54f9", size = 223315, upload-time = "2025-08-07T08:24:03.337Z" }, + { url = "https://files.pythonhosted.org/packages/81/d2/dfdfd42565a923b9e5a29f93501664f5b984a802967d48d49200ad71be36/rpds_py-0.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:443d239d02d9ae55b74015234f2cd8eb09e59fbba30bf60baeb3123ad4c6d5ff", size = 362133, upload-time = "2025-08-07T08:24:04.508Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/0a2e2460c4b66021d349ce9f6331df1d6c75d7eea90df9785d333a49df04/rpds_py-0.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8a7acf04fda1f30f1007f3cc96d29d8cf0a53e626e4e1655fdf4eabc082d367", size = 347128, upload-time = "2025-08-07T08:24:05.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/8d/7d1e4390dfe09d4213b3175a3f5a817514355cb3524593380733204f20b9/rpds_py-0.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0f92b78cfc3b74a42239fdd8c1266f4715b573204c234d2f9fc3fc7a24f185", size = 384027, upload-time = "2025-08-07T08:24:06.841Z" }, + { url = "https://files.pythonhosted.org/packages/c1/65/78499d1a62172891c8cd45de737b2a4b84a414b6ad8315ab3ac4945a5b61/rpds_py-0.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ce4ed8e0c7dbc5b19352b9c2c6131dd23b95fa8698b5cdd076307a33626b72dc", size = 399973, upload-time = "2025-08-07T08:24:08.143Z" }, + { url = "https://files.pythonhosted.org/packages/10/a1/1c67c1d8cc889107b19570bb01f75cf49852068e95e6aee80d22915406fc/rpds_py-0.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fde355b02934cc6b07200cc3b27ab0c15870a757d1a72fd401aa92e2ea3c6bfe", size = 515295, upload-time = "2025-08-07T08:24:09.711Z" }, + { url = "https://files.pythonhosted.org/packages/df/27/700ec88e748436b6c7c4a2262d66e80f8c21ab585d5e98c45e02f13f21c0/rpds_py-0.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13bbc4846ae4c993f07c93feb21a24d8ec637573d567a924b1001e81c8ae80f9", size = 406737, upload-time = "2025-08-07T08:24:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/33/cc/6b0ee8f0ba3f2df2daac1beda17fde5cf10897a7d466f252bd184ef20162/rpds_py-0.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0744661afbc4099fef7f4e604e7f1ea1be1dd7284f357924af12a705cc7d5c", size = 385898, upload-time = "2025-08-07T08:24:12.798Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7e/c927b37d7d33c0a0ebf249cc268dc2fcec52864c1b6309ecb960497f2285/rpds_py-0.27.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:069e0384a54f427bd65d7fda83b68a90606a3835901aaff42185fcd94f5a9295", size = 405785, upload-time = "2025-08-07T08:24:14.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/d2/8ed50746d909dcf402af3fa58b83d5a590ed43e07251d6b08fad1a535ba6/rpds_py-0.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bc262ace5a1a7dc3e2eac2fa97b8257ae795389f688b5adf22c5db1e2431c43", size = 419760, upload-time = "2025-08-07T08:24:16.129Z" }, + { url = "https://files.pythonhosted.org/packages/d3/60/2b2071aee781cb3bd49f94d5d35686990b925e9b9f3e3d149235a6f5d5c1/rpds_py-0.27.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2fe6e18e5c8581f0361b35ae575043c7029d0a92cb3429e6e596c2cdde251432", size = 561201, upload-time = "2025-08-07T08:24:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/98/1f/27b67304272521aaea02be293fecedce13fa351a4e41cdb9290576fc6d81/rpds_py-0.27.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d93ebdb82363d2e7bec64eecdc3632b59e84bd270d74fe5be1659f7787052f9b", size = 591021, upload-time = "2025-08-07T08:24:18.999Z" }, + { url = "https://files.pythonhosted.org/packages/db/9b/a2fadf823164dd085b1f894be6443b0762a54a7af6f36e98e8fcda69ee50/rpds_py-0.27.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0954e3a92e1d62e83a54ea7b3fdc9efa5d61acef8488a8a3d31fdafbfb00460d", size = 556368, upload-time = "2025-08-07T08:24:20.54Z" }, + { url = "https://files.pythonhosted.org/packages/24/f3/6d135d46a129cda2e3e6d4c5e91e2cc26ea0428c6cf152763f3f10b6dd05/rpds_py-0.27.0-cp313-cp313-win32.whl", hash = "sha256:2cff9bdd6c7b906cc562a505c04a57d92e82d37200027e8d362518df427f96cd", size = 221236, upload-time = "2025-08-07T08:24:22.144Z" }, + { url = "https://files.pythonhosted.org/packages/c5/44/65d7494f5448ecc755b545d78b188440f81da98b50ea0447ab5ebfdf9bd6/rpds_py-0.27.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc79d192fb76fc0c84f2c58672c17bbbc383fd26c3cdc29daae16ce3d927e8b2", size = 232634, upload-time = "2025-08-07T08:24:23.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/d9/23852410fadab2abb611733933401de42a1964ce6600a3badae35fbd573e/rpds_py-0.27.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b3a5c8089eed498a3af23ce87a80805ff98f6ef8f7bdb70bd1b7dae5105f6ac", size = 222783, upload-time = "2025-08-07T08:24:25.098Z" }, + { url = "https://files.pythonhosted.org/packages/15/75/03447917f78512b34463f4ef11066516067099a0c466545655503bed0c77/rpds_py-0.27.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:90fb790138c1a89a2e58c9282fe1089638401f2f3b8dddd758499041bc6e0774", size = 359154, upload-time = "2025-08-07T08:24:26.249Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fc/4dac4fa756451f2122ddaf136e2c6aeb758dc6fdbe9ccc4bc95c98451d50/rpds_py-0.27.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:010c4843a3b92b54373e3d2291a7447d6c3fc29f591772cc2ea0e9f5c1da434b", size = 343909, upload-time = "2025-08-07T08:24:27.405Z" }, + { url = "https://files.pythonhosted.org/packages/7b/81/723c1ed8e6f57ed9d8c0c07578747a2d3d554aaefc1ab89f4e42cfeefa07/rpds_py-0.27.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9ce7a9e967afc0a2af7caa0d15a3e9c1054815f73d6a8cb9225b61921b419bd", size = 379340, upload-time = "2025-08-07T08:24:28.714Z" }, + { url = "https://files.pythonhosted.org/packages/98/16/7e3740413de71818ce1997df82ba5f94bae9fff90c0a578c0e24658e6201/rpds_py-0.27.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa0bf113d15e8abdfee92aa4db86761b709a09954083afcb5bf0f952d6065fdb", size = 391655, upload-time = "2025-08-07T08:24:30.223Z" }, + { url = "https://files.pythonhosted.org/packages/e0/63/2a9f510e124d80660f60ecce07953f3f2d5f0b96192c1365443859b9c87f/rpds_py-0.27.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb91d252b35004a84670dfeafadb042528b19842a0080d8b53e5ec1128e8f433", size = 513017, upload-time = "2025-08-07T08:24:31.446Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/4e/cf6ff311d09776c53ea1b4f2e6700b9d43bb4e99551006817ade4bbd6f78/rpds_py-0.27.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db8a6313dbac934193fc17fe7610f70cd8181c542a91382531bef5ed785e5615", size = 402058, upload-time = "2025-08-07T08:24:32.613Z" }, + { url = "https://files.pythonhosted.org/packages/88/11/5e36096d474cb10f2a2d68b22af60a3bc4164fd8db15078769a568d9d3ac/rpds_py-0.27.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce96ab0bdfcef1b8c371ada2100767ace6804ea35aacce0aef3aeb4f3f499ca8", size = 383474, upload-time = "2025-08-07T08:24:33.767Z" }, + { url = "https://files.pythonhosted.org/packages/db/a2/3dff02805b06058760b5eaa6d8cb8db3eb3e46c9e452453ad5fc5b5ad9fe/rpds_py-0.27.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:7451ede3560086abe1aa27dcdcf55cd15c96b56f543fb12e5826eee6f721f858", size = 400067, upload-time = "2025-08-07T08:24:35.021Z" }, + { url = "https://files.pythonhosted.org/packages/67/87/eed7369b0b265518e21ea836456a4ed4a6744c8c12422ce05bce760bb3cf/rpds_py-0.27.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:32196b5a99821476537b3f7732432d64d93a58d680a52c5e12a190ee0135d8b5", size = 412085, upload-time = "2025-08-07T08:24:36.267Z" }, + { url = "https://files.pythonhosted.org/packages/8b/48/f50b2ab2fbb422fbb389fe296e70b7a6b5ea31b263ada5c61377e710a924/rpds_py-0.27.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a029be818059870664157194e46ce0e995082ac49926f1423c1f058534d2aaa9", size = 555928, upload-time = "2025-08-07T08:24:37.573Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/b18eb51045d06887666c3560cd4bbb6819127b43d758f5adb82b5f56f7d1/rpds_py-0.27.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3841f66c1ffdc6cebce8aed64e36db71466f1dc23c0d9a5592e2a782a3042c79", size = 585527, upload-time = "2025-08-07T08:24:39.391Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/03/a3dd6470fc76499959b00ae56295b76b4bdf7c6ffc60d62006b1217567e1/rpds_py-0.27.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:42894616da0fc0dcb2ec08a77896c3f56e9cb2f4b66acd76fc8992c3557ceb1c", size = 554211, upload-time = "2025-08-07T08:24:40.6Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d1/ee5fd1be395a07423ac4ca0bcc05280bf95db2b155d03adefeb47d5ebf7e/rpds_py-0.27.0-cp313-cp313t-win32.whl", hash = "sha256:b1fef1f13c842a39a03409e30ca0bf87b39a1e2a305a9924deadb75a43105d23", size = 216624, upload-time = "2025-08-07T08:24:42.204Z" }, + { url = "https://files.pythonhosted.org/packages/1c/94/4814c4c858833bf46706f87349c37ca45e154da7dbbec9ff09f1abeb08cc/rpds_py-0.27.0-cp313-cp313t-win_amd64.whl", hash = "sha256:183f5e221ba3e283cd36fdfbe311d95cd87699a083330b4f792543987167eff1", size = 230007, upload-time = "2025-08-07T08:24:43.329Z" }, + { url = "https://files.pythonhosted.org/packages/0e/a5/8fffe1c7dc7c055aa02df310f9fb71cfc693a4d5ccc5de2d3456ea5fb022/rpds_py-0.27.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:f3cd110e02c5bf17d8fb562f6c9df5c20e73029d587cf8602a2da6c5ef1e32cb", size = 362595, upload-time = "2025-08-07T08:24:44.478Z" }, + { url = "https://files.pythonhosted.org/packages/bc/c7/4e4253fd2d4bb0edbc0b0b10d9f280612ca4f0f990e3c04c599000fe7d71/rpds_py-0.27.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8d0e09cf4863c74106b5265c2c310f36146e2b445ff7b3018a56799f28f39f6f", size = 347252, upload-time = "2025-08-07T08:24:45.678Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/3d1a954d30f0174dd6baf18b57c215da03cf7846a9d6e0143304e784cddc/rpds_py-0.27.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f689ab822f9b5eb6dfc69893b4b9366db1d2420f7db1f6a2adf2a9ca15ad64", size = 384886, upload-time = "2025-08-07T08:24:46.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/52/3c5835f2df389832b28f9276dd5395b5a965cea34226e7c88c8fbec2093c/rpds_py-0.27.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e36c80c49853b3ffda7aa1831bf175c13356b210c73128c861f3aa93c3cc4015", size = 399716, upload-time = "2025-08-07T08:24:48.174Z" }, + { url = "https://files.pythonhosted.org/packages/40/73/176e46992461a1749686a2a441e24df51ff86b99c2d34bf39f2a5273b987/rpds_py-0.27.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6de6a7f622860af0146cb9ee148682ff4d0cea0b8fd3ad51ce4d40efb2f061d0", size = 517030, upload-time = "2025-08-07T08:24:49.52Z" }, + { url = "https://files.pythonhosted.org/packages/79/2a/7266c75840e8c6e70effeb0d38922a45720904f2cd695e68a0150e5407e2/rpds_py-0.27.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4045e2fc4b37ec4b48e8907a5819bdd3380708c139d7cc358f03a3653abedb89", size = 408448, upload-time = "2025-08-07T08:24:50.727Z" }, + { url = "https://files.pythonhosted.org/packages/e6/5f/a7efc572b8e235093dc6cf39f4dbc8a7f08e65fdbcec7ff4daeb3585eef1/rpds_py-0.27.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da162b718b12c4219eeeeb68a5b7552fbc7aadedf2efee440f88b9c0e54b45d", size = 387320, upload-time = "2025-08-07T08:24:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/a2/eb/9ff6bc92efe57cf5a2cb74dee20453ba444b6fdc85275d8c99e0d27239d1/rpds_py-0.27.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:0665be515767dc727ffa5f74bd2ef60b0ff85dad6bb8f50d91eaa6b5fb226f51", size = 407414, upload-time = "2025-08-07T08:24:53.664Z" }, + { url = "https://files.pythonhosted.org/packages/fb/bd/3b9b19b00d5c6e1bd0f418c229ab0f8d3b110ddf7ec5d9d689ef783d0268/rpds_py-0.27.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:203f581accef67300a942e49a37d74c12ceeef4514874c7cede21b012613ca2c", size = 420766, upload-time = "2025-08-07T08:24:55.917Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/6b/521a7b1079ce16258c70805166e3ac6ec4ee2139d023fe07954dc9b2d568/rpds_py-0.27.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7873b65686a6471c0037139aa000d23fe94628e0daaa27b6e40607c90e3f5ec4", size = 562409, upload-time = "2025-08-07T08:24:57.17Z" }, + { url = "https://files.pythonhosted.org/packages/8b/bf/65db5bfb14ccc55e39de8419a659d05a2a9cd232f0a699a516bb0991da7b/rpds_py-0.27.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:249ab91ceaa6b41abc5f19513cb95b45c6f956f6b89f1fe3d99c81255a849f9e", size = 590793, upload-time = "2025-08-07T08:24:58.388Z" }, + { url = "https://files.pythonhosted.org/packages/db/b8/82d368b378325191ba7aae8f40f009b78057b598d4394d1f2cdabaf67b3f/rpds_py-0.27.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d2f184336bc1d6abfaaa1262ed42739c3789b1e3a65a29916a615307d22ffd2e", size = 558178, upload-time = "2025-08-07T08:24:59.756Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ff/f270bddbfbc3812500f8131b1ebbd97afd014cd554b604a3f73f03133a36/rpds_py-0.27.0-cp314-cp314-win32.whl", hash = "sha256:d3c622c39f04d5751408f5b801ecb527e6e0a471b367f420a877f7a660d583f6", size = 222355, upload-time = "2025-08-07T08:25:01.027Z" }, + { url = "https://files.pythonhosted.org/packages/bf/20/fdab055b1460c02ed356a0e0b0a78c1dd32dc64e82a544f7b31c9ac643dc/rpds_py-0.27.0-cp314-cp314-win_amd64.whl", hash = "sha256:cf824aceaeffff029ccfba0da637d432ca71ab21f13e7f6f5179cd88ebc77a8a", size = 234007, upload-time = "2025-08-07T08:25:02.268Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a8/694c060005421797a3be4943dab8347c76c2b429a9bef68fb2c87c9e70c7/rpds_py-0.27.0-cp314-cp314-win_arm64.whl", hash = "sha256:86aca1616922b40d8ac1b3073a1ead4255a2f13405e5700c01f7c8d29a03972d", size = 223527, upload-time = "2025-08-07T08:25:03.45Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f9/77f4c90f79d2c5ca8ce6ec6a76cb4734ee247de6b3a4f337e289e1f00372/rpds_py-0.27.0-cp314-cp314t-macosx_10_12_x86_64.whl", 
hash = "sha256:341d8acb6724c0c17bdf714319c393bb27f6d23d39bc74f94221b3e59fc31828", size = 359469, upload-time = "2025-08-07T08:25:04.648Z" }, + { url = "https://files.pythonhosted.org/packages/c0/22/b97878d2f1284286fef4172069e84b0b42b546ea7d053e5fb7adb9ac6494/rpds_py-0.27.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b96b0b784fe5fd03beffff2b1533dc0d85e92bab8d1b2c24ef3a5dc8fac5669", size = 343960, upload-time = "2025-08-07T08:25:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b0/dfd55b5bb480eda0578ae94ef256d3061d20b19a0f5e18c482f03e65464f/rpds_py-0.27.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c431bfb91478d7cbe368d0a699978050d3b112d7f1d440a41e90faa325557fd", size = 380201, upload-time = "2025-08-07T08:25:07.513Z" }, + { url = "https://files.pythonhosted.org/packages/28/22/e1fa64e50d58ad2b2053077e3ec81a979147c43428de9e6de68ddf6aff4e/rpds_py-0.27.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20e222a44ae9f507d0f2678ee3dd0c45ec1e930f6875d99b8459631c24058aec", size = 392111, upload-time = "2025-08-07T08:25:09.149Z" }, + { url = "https://files.pythonhosted.org/packages/49/f9/43ab7a43e97aedf6cea6af70fdcbe18abbbc41d4ae6cdec1bfc23bbad403/rpds_py-0.27.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:184f0d7b342967f6cda94a07d0e1fae177d11d0b8f17d73e06e36ac02889f303", size = 515863, upload-time = "2025-08-07T08:25:10.431Z" }, + { url = "https://files.pythonhosted.org/packages/38/9b/9bd59dcc636cd04d86a2d20ad967770bf348f5eb5922a8f29b547c074243/rpds_py-0.27.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a00c91104c173c9043bc46f7b30ee5e6d2f6b1149f11f545580f5d6fdff42c0b", size = 402398, upload-time = "2025-08-07T08:25:11.819Z" }, + { url = "https://files.pythonhosted.org/packages/71/bf/f099328c6c85667aba6b66fa5c35a8882db06dcd462ea214be72813a0dd2/rpds_py-0.27.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f7a37dd208f0d658e0487522078b1ed68cd6bce20ef4b5a915d2809b9094b410", size = 384665, upload-time = "2025-08-07T08:25:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c5/9c1f03121ece6634818490bd3c8be2c82a70928a19de03467fb25a3ae2a8/rpds_py-0.27.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:92f3b3ec3e6008a1fe00b7c0946a170f161ac00645cde35e3c9a68c2475e8156", size = 400405, upload-time = "2025-08-07T08:25:14.417Z" }, + { url = "https://files.pythonhosted.org/packages/b5/b8/e25d54af3e63ac94f0c16d8fe143779fe71ff209445a0c00d0f6984b6b2c/rpds_py-0.27.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1b3db5fae5cbce2131b7420a3f83553d4d89514c03d67804ced36161fe8b6b2", size = 413179, upload-time = "2025-08-07T08:25:15.664Z" }, + { url = "https://files.pythonhosted.org/packages/f9/d1/406b3316433fe49c3021546293a04bc33f1478e3ec7950215a7fce1a1208/rpds_py-0.27.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5355527adaa713ab693cbce7c1e0ec71682f599f61b128cf19d07e5c13c9b1f1", size = 556895, upload-time = "2025-08-07T08:25:17.061Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bc/3697c0c21fcb9a54d46ae3b735eb2365eea0c2be076b8f770f98e07998de/rpds_py-0.27.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fcc01c57ce6e70b728af02b2401c5bc853a9e14eb07deda30624374f0aebfe42", size = 585464, upload-time = "2025-08-07T08:25:18.406Z" }, + { url = "https://files.pythonhosted.org/packages/63/09/ee1bb5536f99f42c839b177d552f6114aa3142d82f49cef49261ed28dbe0/rpds_py-0.27.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3001013dae10f806380ba739d40dee11db1ecb91684febb8406a87c2ded23dae", size = 555090, upload-time = "2025-08-07T08:25:20.461Z" }, + { url = "https://files.pythonhosted.org/packages/7d/2c/363eada9e89f7059199d3724135a86c47082cbf72790d6ba2f336d146ddb/rpds_py-0.27.0-cp314-cp314t-win32.whl", hash = "sha256:0f401c369186a5743694dd9fc08cba66cf70908757552e1f714bfc5219c655b5", size = 218001, upload-time = 
"2025-08-07T08:25:21.761Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3f/d6c216ed5199c9ef79e2a33955601f454ed1e7420a93b89670133bca5ace/rpds_py-0.27.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8a1dca5507fa1337f75dcd5070218b20bc68cf8844271c923c1b79dfcbc20391", size = 230993, upload-time = "2025-08-07T08:25:23.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2e/82fee0cb7142bc32a9ce586eadd24a945257c016902d575bb377ad5feb10/rpds_py-0.27.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e0d7151a1bd5d0a203a5008fc4ae51a159a610cb82ab0a9b2c4d80241745582e", size = 371495, upload-time = "2025-08-07T08:25:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b5/b421756c7e5cc1d2bb438a34b16f750363d0d87caf2bfa6f2326423c42e5/rpds_py-0.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42ccc57ff99166a55a59d8c7d14f1a357b7749f9ed3584df74053fd098243451", size = 354823, upload-time = "2025-08-07T08:25:25.854Z" }, + { url = "https://files.pythonhosted.org/packages/f9/4a/63337bbabfa38d4094144d0e689758e8452372fd3e45359b806fc1b4c022/rpds_py-0.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e377e4cf8795cdbdff75b8f0223d7b6c68ff4fef36799d88ccf3a995a91c0112", size = 381538, upload-time = "2025-08-07T08:25:27.17Z" }, + { url = "https://files.pythonhosted.org/packages/33/8b/14eb61fb9a5bb830d28c548e3e67046fd04cae06c2ce6afe7f30aba7f7f0/rpds_py-0.27.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:79af163a4b40bbd8cfd7ca86ec8b54b81121d3b213b4435ea27d6568bcba3e9d", size = 396724, upload-time = "2025-08-07T08:25:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/03/54/47faf6aa4040443b108b24ae08e9db6fe6daaa8140b696f905833f325293/rpds_py-0.27.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2eff8ee57c5996b0d2a07c3601fb4ce5fbc37547344a26945dd9e5cbd1ed27a", size = 517084, upload-time = "2025-08-07T08:25:29.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/88/a78dbacc9a96e3ea7e83d9bed8f272754e618c629ed6a9f8e2a506c84419/rpds_py-0.27.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7cf9bc4508efb18d8dff6934b602324eb9f8c6644749627ce001d6f38a490889", size = 402397, upload-time = "2025-08-07T08:25:31.21Z" }, + { url = "https://files.pythonhosted.org/packages/6b/88/268c6422c0c3a0f01bf6e79086f6e4dbc6a2e60a6e95413ad17e3392ec0a/rpds_py-0.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05284439ebe7d9f5f5a668d4d8a0a1d851d16f7d47c78e1fab968c8ad30cab04", size = 383570, upload-time = "2025-08-07T08:25:32.842Z" }, + { url = "https://files.pythonhosted.org/packages/9c/1a/34f5a2459b9752cc08e02c3845c8f570222f7dbd48c7baac4b827701a40e/rpds_py-0.27.0-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:1321bce595ad70e80f97f998db37356b2e22cf98094eba6fe91782e626da2f71", size = 401771, upload-time = "2025-08-07T08:25:34.201Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9b/16979115f2ec783ca06454a141a0f32f082763ef874675c5f756e6e76fcd/rpds_py-0.27.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:737005088449ddd3b3df5a95476ee1c2c5c669f5c30eed909548a92939c0e12d", size = 416215, upload-time = "2025-08-07T08:25:35.559Z" }, + { url = "https://files.pythonhosted.org/packages/81/0b/0305df88fb22db8efe81753ce4ec51b821555448fd94ec77ae4e5dfd57b7/rpds_py-0.27.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b2a4e17bfd68536c3b801800941c95a1d4a06e3cada11c146093ba939d9638d", size = 558573, upload-time = "2025-08-07T08:25:36.935Z" }, + { url = "https://files.pythonhosted.org/packages/84/9a/c48be4da43a556495cf66d6bf71a16e8e3e22ae8e724b678e430521d0702/rpds_py-0.27.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dc6b0d5a1ea0318ef2def2b6a55dccf1dcaf77d605672347271ed7b829860765", size = 587956, upload-time = "2025-08-07T08:25:38.338Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/95/deb1111abde461330c4dad22b14347d064161fb7cb249746a06accc07633/rpds_py-0.27.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4c3f8a0d4802df34fcdbeb3dfe3a4d8c9a530baea8fafdf80816fcaac5379d83", size = 554493, upload-time = "2025-08-07T08:25:39.665Z" }, + { url = "https://files.pythonhosted.org/packages/cb/16/5342d91917f26da91fc193932d9fbf422e2903aaee9bd3c6ecb4875ef17f/rpds_py-0.27.0-cp39-cp39-win32.whl", hash = "sha256:699c346abc73993962cac7bb4f02f58e438840fa5458a048d3a178a7a670ba86", size = 218302, upload-time = "2025-08-07T08:25:41.401Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a3/0346108a47efe41b50d8781688b7fb16b18d252053486c932d10b18977c9/rpds_py-0.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:be806e2961cd390a89d6c3ce8c2ae34271cfcd05660f716257838bb560f1c3b6", size = 229977, upload-time = "2025-08-07T08:25:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/47/55/287068956f9ba1cb40896d291213f09fdd4527630709058b45a592bc09dc/rpds_py-0.27.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:46f48482c1a4748ab2773f75fffbdd1951eb59794e32788834b945da857c47a8", size = 371566, upload-time = "2025-08-07T08:25:43.95Z" }, + { url = "https://files.pythonhosted.org/packages/a2/fb/443af59cbe552e89680bb0f1d1ba47f6387b92083e28a45b8c8863b86c5a/rpds_py-0.27.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:419dd9c98bcc9fb0242be89e0c6e922df333b975d4268faa90d58499fd9c9ebe", size = 355781, upload-time = "2025-08-07T08:25:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/ad/f0/35f48bb073b5ca42b1dcc55cb148f4a3bd4411a3e584f6a18d26f0ea8832/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d42a0ef2bdf6bc81e1cc2d49d12460f63c6ae1423c4f4851b828e454ccf6f1", size = 382575, upload-time = "2025-08-07T08:25:46.524Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/e1/5f5296a21d1189f0f116a938af2e346d83172bf814d373695e54004a936f/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e39169ac6aae06dd79c07c8a69d9da867cef6a6d7883a0186b46bb46ccfb0c3", size = 397435, upload-time = "2025-08-07T08:25:48.204Z" }, + { url = "https://files.pythonhosted.org/packages/97/79/3af99b7852b2b55cad8a08863725cbe9dc14781bcf7dc6ecead0c3e1dc54/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:935afcdea4751b0ac918047a2df3f720212892347767aea28f5b3bf7be4f27c0", size = 514861, upload-time = "2025-08-07T08:25:49.814Z" }, + { url = "https://files.pythonhosted.org/packages/df/3e/11fd6033708ed3ae0e6947bb94f762f56bb46bf59a1b16eef6944e8a62ee/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8de567dec6d451649a781633d36f5c7501711adee329d76c095be2178855b042", size = 402776, upload-time = "2025-08-07T08:25:51.135Z" }, + { url = "https://files.pythonhosted.org/packages/b7/89/f9375ceaa996116de9cbc949874804c7874d42fb258c384c037a46d730b8/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:555ed147cbe8c8f76e72a4c6cd3b7b761cbf9987891b9448808148204aed74a5", size = 384665, upload-time = "2025-08-07T08:25:52.82Z" }, + { url = "https://files.pythonhosted.org/packages/48/bf/0061e55c6f1f573a63c0f82306b8984ed3b394adafc66854a936d5db3522/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:d2cc2b34f9e1d31ce255174da82902ad75bd7c0d88a33df54a77a22f2ef421ee", size = 402518, upload-time = "2025-08-07T08:25:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/ae/dc/8d506676bfe87b3b683332ec8e6ab2b0be118a3d3595ed021e3274a63191/rpds_py-0.27.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cb0702c12983be3b2fab98ead349ac63a98216d28dda6f518f52da5498a27a1b", size = 416247, upload-time = 
"2025-08-07T08:25:55.433Z" }, + { url = "https://files.pythonhosted.org/packages/2e/02/9a89eea1b75c69e81632de7963076e455b1e00e1cfb46dfdabb055fa03e3/rpds_py-0.27.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ba783541be46f27c8faea5a6645e193943c17ea2f0ffe593639d906a327a9bcc", size = 559456, upload-time = "2025-08-07T08:25:56.866Z" }, + { url = "https://files.pythonhosted.org/packages/38/4a/0f3ac4351957847c0d322be6ec72f916e43804a2c1d04e9672ea4a67c315/rpds_py-0.27.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:2406d034635d1497c596c40c85f86ecf2bf9611c1df73d14078af8444fe48031", size = 587778, upload-time = "2025-08-07T08:25:58.202Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8e/39d0d7401095bed5a5ad5ef304fae96383f9bef40ca3f3a0807ff5b68d9d/rpds_py-0.27.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dea0808153f1fbbad772669d906cddd92100277533a03845de6893cadeffc8be", size = 555247, upload-time = "2025-08-07T08:25:59.707Z" }, + { url = "https://files.pythonhosted.org/packages/e0/04/6b8311e811e620b9eaca67cd80a118ff9159558a719201052a7b2abb88bf/rpds_py-0.27.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2a81bdcfde4245468f7030a75a37d50400ac2455c3a4819d9d550c937f90ab5", size = 230256, upload-time = "2025-08-07T08:26:01.07Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/72ab5b911fdcc48058359b0e786e5363e3fde885156116026f1a2ba9a5b5/rpds_py-0.27.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e6491658dd2569f05860bad645569145c8626ac231877b0fb2d5f9bcb7054089", size = 371658, upload-time = "2025-08-07T08:26:02.369Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4b/90ff04b4da055db53d8fea57640d8d5d55456343a1ec9a866c0ecfe10fd1/rpds_py-0.27.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec77545d188f8bdd29d42bccb9191682a46fb2e655e3d1fb446d47c55ac3b8d", size = 355529, upload-time = "2025-08-07T08:26:03.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/be/527491fb1afcd86fc5ce5812eb37bc70428ee017d77fee20de18155c3937/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a4aebf8ca02bbb90a9b3e7a463bbf3bee02ab1c446840ca07b1695a68ce424", size = 382822, upload-time = "2025-08-07T08:26:05.52Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a5/dcdb8725ce11e6d0913e6fcf782a13f4b8a517e8acc70946031830b98441/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44524b96481a4c9b8e6c46d6afe43fa1fb485c261e359fbe32b63ff60e3884d8", size = 397233, upload-time = "2025-08-07T08:26:07.179Z" }, + { url = "https://files.pythonhosted.org/packages/33/f9/0947920d1927e9f144660590cc38cadb0795d78fe0d9aae0ef71c1513b7c/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45d04a73c54b6a5fd2bab91a4b5bc8b426949586e61340e212a8484919183859", size = 514892, upload-time = "2025-08-07T08:26:08.622Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ed/d1343398c1417c68f8daa1afce56ef6ce5cc587daaf98e29347b00a80ff2/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:343cf24de9ed6c728abefc5d5c851d5de06497caa7ac37e5e65dd572921ed1b5", size = 402733, upload-time = "2025-08-07T08:26:10.433Z" }, + { url = "https://files.pythonhosted.org/packages/1d/0b/646f55442cd14014fb64d143428f25667a100f82092c90087b9ea7101c74/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aed8118ae20515974650d08eb724150dc2e20c2814bcc307089569995e88a14", size = 384447, upload-time = "2025-08-07T08:26:11.847Z" }, + { url = "https://files.pythonhosted.org/packages/4b/15/0596ef7529828e33a6c81ecf5013d1dd33a511a3e0be0561f83079cda227/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:af9d4fd79ee1cc8e7caf693ee02737daabfc0fcf2773ca0a4735b356c8ad6f7c", size = 402502, upload-time = 
"2025-08-07T08:26:13.537Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8d/986af3c42f8454a6cafff8729d99fb178ae9b08a9816325ac7a8fa57c0c0/rpds_py-0.27.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f0396e894bd1e66c74ecbc08b4f6a03dc331140942c4b1d345dd131b68574a60", size = 416651, upload-time = "2025-08-07T08:26:14.923Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9a/b4ec3629b7b447e896eec574469159b5b60b7781d3711c914748bf32de05/rpds_py-0.27.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:59714ab0a5af25d723d8e9816638faf7f4254234decb7d212715c1aa71eee7be", size = 559460, upload-time = "2025-08-07T08:26:16.295Z" }, + { url = "https://files.pythonhosted.org/packages/61/63/d1e127b40c3e4733b3a6f26ae7a063cdf2bc1caa5272c89075425c7d397a/rpds_py-0.27.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:88051c3b7d5325409f433c5a40328fcb0685fc04e5db49ff936e910901d10114", size = 588072, upload-time = "2025-08-07T08:26:17.776Z" }, + { url = "https://files.pythonhosted.org/packages/04/7e/8ffc71a8f6833d9c9fb999f5b0ee736b8b159fd66968e05c7afc2dbcd57e/rpds_py-0.27.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:181bc29e59e5e5e6e9d63b143ff4d5191224d355e246b5a48c88ce6b35c4e466", size = 555083, upload-time = "2025-08-07T08:26:19.301Z" }, + { url = "https://files.pythonhosted.org/packages/a8/fc/ef6386838e0e91d6ba79b741ccce6ca987e89619aa86f418fecf381eba23/rpds_py-0.27.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9ad08547995a57e74fea6abaf5940d399447935faebbd2612b3b0ca6f987946b", size = 371849, upload-time = "2025-08-07T08:26:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f8/f30394aff811bc0f13fab8d8e4b9f880fcb678234eb0af7d2c4b6232f44f/rpds_py-0.27.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:61490d57e82e23b45c66f96184237994bfafa914433b8cd1a9bb57fecfced59d", size = 356437, upload-time = "2025-08-07T08:26:21.899Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/56/ed704fc668c9abc56d3686b723e4d6f2585597daf4b68b654ade7c97930d/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7cf5e726b6fa977e428a61880fb108a62f28b6d0c7ef675b117eaff7076df49", size = 382247, upload-time = "2025-08-07T08:26:23.712Z" }, + { url = "https://files.pythonhosted.org/packages/48/55/6ef2c9b7caae3c1c360d9556a70979e16f21bfb1e94f50f481d224f3b8aa/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc662bc9375a6a394b62dfd331874c434819f10ee3902123200dbcf116963f89", size = 397223, upload-time = "2025-08-07T08:26:25.156Z" }, + { url = "https://files.pythonhosted.org/packages/63/04/8fc2059411daaca733155fc2613cc91dc728d7abe31fd0c0fa4c7ec5ff1a/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:299a245537e697f28a7511d01038c310ac74e8ea213c0019e1fc65f52c0dcb23", size = 516308, upload-time = "2025-08-07T08:26:26.585Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d0/b79d3fe07c47bfa989139e692f85371f5a0e1376696b173dabe7ac77b7d1/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be3964f7312ea05ed283b20f87cb533fdc555b2e428cc7be64612c0b2124f08c", size = 401967, upload-time = "2025-08-07T08:26:27.905Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b1/55014f6da5ec8029d1d7d7d2a884b9d7ad7f217e05bb9cb782f06d8209c4/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ba649a6e55ae3808e4c39e01580dc9a9b0d5b02e77b66bb86ef117922b1264", size = 384584, upload-time = "2025-08-07T08:26:29.251Z" }, + { url = "https://files.pythonhosted.org/packages/86/34/5c5c1a8550ac172dd6cd53925c321363d94b2a1f0b3173743dbbfd87b8ec/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:81f81bbd7cdb4bdc418c09a73809abeda8f263a6bf8f9c7f93ed98b5597af39d", size = 401879, upload-time = 
"2025-08-07T08:26:30.598Z" }, + { url = "https://files.pythonhosted.org/packages/35/07/009bbc659388c4c5a256f05f56df207633cda2f5d61a8d54c50c427e435e/rpds_py-0.27.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11e8e28c0ba0373d052818b600474cfee2fafa6c9f36c8587d217b13ee28ca7d", size = 416908, upload-time = "2025-08-07T08:26:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cc/8949c13dc5a05d955cb88909bfac4004805974dec7b0d02543de55e43272/rpds_py-0.27.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e3acb9c16530362aeaef4e84d57db357002dc5cbfac9a23414c3e73c08301ab2", size = 559105, upload-time = "2025-08-07T08:26:33.53Z" }, + { url = "https://files.pythonhosted.org/packages/ea/40/574da2033b01d6e2e7fa3b021993321565c6634f9d0021707d210ce35b58/rpds_py-0.27.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2e307cb5f66c59ede95c00e93cd84190a5b7f3533d7953690b2036780622ba81", size = 588335, upload-time = "2025-08-07T08:26:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/1d/83/72ed1ce357d8c63bde0bba2458a502e7cc4e150e272139161e1d205a9d67/rpds_py-0.27.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f09c9d4c26fa79c1bad927efb05aca2391350b8e61c38cbc0d7d3c814e463124", size = 555094, upload-time = "2025-08-07T08:26:36.838Z" }, + { url = "https://files.pythonhosted.org/packages/6f/15/fc639de53b3798340233f37959d252311b30d1834b65a02741e3373407fa/rpds_py-0.27.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af22763a0a1eff106426a6e1f13c4582e0d0ad89c1493ab6c058236174cd6c6a", size = 230031, upload-time = "2025-08-07T08:26:38.332Z" }, +] + [[package]] name = "ruff" version = "0.12.9" @@ -2890,6 +4445,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, ] 
+[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + [[package]] name = "soupsieve" version = "2.7" @@ -2899,6 +4463,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, ] +[[package]] +name = "sqlalchemy" +version = "2.0.43" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = 
"2025-08-11T14:24:58.438Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/4e/985f7da36f09592c5ade99321c72c15101d23c0bb7eecfd1daaca5714422/sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069", size = 2133162, upload-time = "2025-08-11T15:52:17.854Z" }, + { url = "https://files.pythonhosted.org/packages/37/34/798af8db3cae069461e3bc0898a1610dc469386a97048471d364dc8aae1c/sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154", size = 2123082, upload-time = "2025-08-11T15:52:19.181Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0f/79cf4d9dad42f61ec5af1e022c92f66c2d110b93bb1dc9b033892971abfa/sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612", size = 3208871, upload-time = "2025-08-11T15:50:30.656Z" }, + { url = "https://files.pythonhosted.org/packages/56/b3/59befa58fb0e1a9802c87df02344548e6d007e77e87e6084e2131c29e033/sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019", size = 3209583, upload-time = "2025-08-11T15:57:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/29/d2/124b50c0eb8146e8f0fe16d01026c1a073844f0b454436d8544fe9b33bd7/sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20", size = 3148177, upload-time = "2025-08-11T15:50:32.078Z" }, + { url = "https://files.pythonhosted.org/packages/83/f5/e369cd46aa84278107624617034a5825fedfc5c958b2836310ced4d2eadf/sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18", size = 3172276, upload-time = "2025-08-11T15:57:49.477Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/2b/4602bf4c3477fa4c837c9774e6dd22e0389fc52310c4c4dfb7e7ba05e90d/sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00", size = 2101491, upload-time = "2025-08-11T15:54:59.191Z" }, + { url = "https://files.pythonhosted.org/packages/38/2d/bfc6b6143adef553a08295490ddc52607ee435b9c751c714620c1b3dd44d/sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b", size = 2125148, upload-time = "2025-08-11T15:55:00.593Z" }, + { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time = "2025-08-11T15:52:21.789Z" }, + { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, + { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, + { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, + { url = "https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 3258240, upload-time = "2025-08-11T15:57:52.983Z" }, + { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, + { url = "https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, + { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, + { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, + { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, + { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/1c/a7260bd47a6fae7e03768bf66451437b36451143f36b285522b865987ced/sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3", size = 2130598, upload-time = "2025-08-11T15:51:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/8e/84/8a337454e82388283830b3586ad7847aa9c76fdd4f1df09cdd1f94591873/sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa", size = 2118415, upload-time = "2025-08-11T15:51:17.256Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ff/22ab2328148492c4d71899d62a0e65370ea66c877aea017a244a35733685/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9", size = 3248707, upload-time = "2025-08-11T15:52:38.444Z" }, + { url = "https://files.pythonhosted.org/packages/dc/29/11ae2c2b981de60187f7cbc84277d9d21f101093d1b2e945c63774477aba/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f", size = 3253602, upload-time = "2025-08-11T15:56:37.348Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/987b6c23b12c56d2be451bc70900f67dd7d989d52b1ee64f239cf19aec69/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738", size = 3183248, upload-time = "2025-08-11T15:52:39.865Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/29d216002d4593c2ce1c0ec2cec46dda77bfbcd221e24caa6e85eff53d89/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164", size = 3219363, upload-time = "2025-08-11T15:56:39.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/e4/bd78b01919c524f190b4905d47e7630bf4130b9f48fd971ae1c6225b6f6a/sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d", size = 2096718, upload-time = "2025-08-11T15:55:05.349Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a5/ca2f07a2a201f9497de1928f787926613db6307992fe5cda97624eb07c2f/sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197", size = 2123200, upload-time = "2025-08-11T15:55:07.932Z" }, + { url = "https://files.pythonhosted.org/packages/92/95/ddb5acf74a71e0fa4f9410c7d8555f169204ae054a49693b3cd31d0bf504/sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7", size = 2136445, upload-time = "2025-08-12T17:29:06.145Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d4/7d7ea7dfbc1ddb0aa54dd63a686cd43842192b8e1bfb5315bb052925f704/sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf", size = 2126411, upload-time = "2025-08-12T17:29:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/07/bd/123ba09bec14112de10e49d8835e6561feb24fd34131099d98d28d34f106/sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad", size = 3221776, upload-time = "2025-08-11T16:00:30.938Z" }, + { url = "https://files.pythonhosted.org/packages/ae/35/553e45d5b91b15980c13e1dbcd7591f49047589843fff903c086d7985afb/sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34", size = 3221665, upload-time = "2025-08-12T17:29:11.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/4d/ff03e516087251da99bd879b5fdb2c697ff20295c836318dda988e12ec19/sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7", size = 3160067, upload-time = "2025-08-11T16:00:33.148Z" }, + { url = "https://files.pythonhosted.org/packages/ae/88/cbc7caa186ecdc5dea013e9ccc00d78b93a6638dc39656a42369a9536458/sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b", size = 3184462, upload-time = "2025-08-12T17:29:14.919Z" }, + { url = "https://files.pythonhosted.org/packages/ab/69/f8bbd43080b6fa75cb44ff3a1cc99aaae538dd0ade1a58206912b2565d72/sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414", size = 2104031, upload-time = "2025-08-11T15:48:56.453Z" }, + { url = "https://files.pythonhosted.org/packages/36/39/2ec1b0e7a4f44d833d924e7bfca8054c72e37eb73f4d02795d16d8b0230a/sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b", size = 2128007, upload-time = "2025-08-11T15:48:57.872Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, +] + [[package]] name = "stack-data" version = "0.6.3" @@ -2922,6 +4539,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, ] +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + [[package]] name = "tiktoken" version = "0.11.0" @@ -2964,6 +4590,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bb/ad/ca37e15c46741ebb3904d562d03194e845539a08f7751a6df0f391757312/tiktoken-0.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:e363f33c720a055586f730c00e330df4c7ea0024bf1c83a8a9a9dbc054c4f304", size = 884702, upload-time = "2025-08-08T23:58:07.534Z" }, ] +[[package]] +name = "tokenizers" +version = "0.21.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/2f/402986d0823f8d7ca139d969af2917fefaa9b947d1fb32f6168c509f2492/tokenizers-0.21.4.tar.gz", hash = "sha256:fa23f85fbc9a02ec5c6978da172cdcbac23498c3ca9f3645c5c68740ac007880", size = 351253, upload-time = "2025-07-28T15:48:54.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/c6/fdb6f72bf6454f52eb4a2510be7fb0f614e541a2554d6210e370d85efff4/tokenizers-0.21.4-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2ccc10a7c3bcefe0f242867dc914fc1226ee44321eb618cfe3019b5df3400133", size = 2863987, upload-time = "2025-07-28T15:48:44.877Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a6/28975479e35ddc751dc1ddc97b9b69bf7fcf074db31548aab37f8116674c/tokenizers-0.21.4-cp39-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:5e2f601a8e0cd5be5cc7506b20a79112370b9b3e9cb5f13f68ab11acd6ca7d60", size = 2732457, upload-time = "2025-07-28T15:48:43.265Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8f/24f39d7b5c726b7b0be95dca04f344df278a3fe3a4deb15a975d194cbb32/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b376f5a1aee67b4d29032ee85511bbd1b99007ec735f7f35c8a2eb104eade5", size = 3012624, upload-time = "2025-07-28T13:22:43.895Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/26358925717687a58cb74d7a508de96649544fad5778f0cd9827398dc499/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2107ad649e2cda4488d41dfd031469e9da3fcbfd6183e74e4958fa729ffbf9c6", size = 2939681, upload-time = "2025-07-28T13:22:47.499Z" }, + { url = "https://files.pythonhosted.org/packages/99/6f/cc300fea5db2ab5ddc2c8aea5757a27b89c84469899710c3aeddc1d39801/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c73012da95afafdf235ba80047699df4384fdc481527448a078ffd00e45a7d9", size = 3247445, upload-time = "2025-07-28T15:48:39.711Z" }, + { url = "https://files.pythonhosted.org/packages/be/bf/98cb4b9c3c4afd8be89cfa6423704337dc20b73eb4180397a6e0d456c334/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f23186c40395fc390d27f519679a58023f368a0aad234af145e0f39ad1212732", size = 3428014, upload-time = "2025-07-28T13:22:49.569Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/96c1cc780e6ca7f01a57c13235dd05b7bc1c0f3588512ebe9d1331b5f5ae/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc88bb34e23a54cc42713d6d98af5f1bf79c07653d24fe984d2d695ba2c922a2", size = 3193197, upload-time = "2025-07-28T13:22:51.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/90/273b6c7ec78af547694eddeea9e05de771278bd20476525ab930cecaf7d8/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51b7eabb104f46c1c50b486520555715457ae833d5aee9ff6ae853d1130506ff", size = 3115426, upload-time = "2025-07-28T15:48:41.439Z" }, + { url = "https://files.pythonhosted.org/packages/91/43/c640d5a07e95f1cf9d2c92501f20a25f179ac53a4f71e1489a3dcfcc67ee/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:714b05b2e1af1288bd1bc56ce496c4cebb64a20d158ee802887757791191e6e2", size = 9089127, upload-time = "2025-07-28T15:48:46.472Z" }, + { url = "https://files.pythonhosted.org/packages/44/a1/dd23edd6271d4dca788e5200a807b49ec3e6987815cd9d0a07ad9c96c7c2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:1340ff877ceedfa937544b7d79f5b7becf33a4cfb58f89b3b49927004ef66f78", size = 9055243, upload-time = "2025-07-28T15:48:48.539Z" }, + { url = "https://files.pythonhosted.org/packages/21/2b/b410d6e9021c4b7ddb57248304dc817c4d4970b73b6ee343674914701197/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:3c1f4317576e465ac9ef0d165b247825a2a4078bcd01cba6b54b867bdf9fdd8b", size = 9298237, upload-time = "2025-07-28T15:48:50.443Z" }, + { url = "https://files.pythonhosted.org/packages/b7/0a/42348c995c67e2e6e5c89ffb9cfd68507cbaeb84ff39c49ee6e0a6dd0fd2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c212aa4e45ec0bb5274b16b6f31dd3f1c41944025c2358faaa5782c754e84c24", size = 9461980, upload-time = "2025-07-28T15:48:52.325Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d3/dacccd834404cd71b5c334882f3ba40331ad2120e69ded32cf5fda9a7436/tokenizers-0.21.4-cp39-abi3-win32.whl", hash = "sha256:6c42a930bc5f4c47f4ea775c91de47d27910881902b0f20e4990ebe045a415d0", size = 2329871, upload-time = "2025-07-28T15:48:56.841Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/f2/fd673d979185f5dcbac4be7d09461cbb99751554ffb6718d0013af8604cb/tokenizers-0.21.4-cp39-abi3-win_amd64.whl", hash = "sha256:475d807a5c3eb72c59ad9b5fcdb254f6e17f53dfcbb9903233b0dfa9c943b597", size = 2507568, upload-time = "2025-07-28T15:48:55.456Z" }, +] + [[package]] name = "tomli" version = "2.2.1" @@ -3136,6 +4787,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + [[package]] name = "tzdata" version = "2025.2" @@ -3165,6 +4828,76 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/d3/13adff37f15489c784cc7669c35a6c3bf94b87540229eedf52ef2a1d0175/ua_parser_builtins-0.18.0.post1-py3-none-any.whl", hash = "sha256:eb4f93504040c3a990a6b0742a2afd540d87d7f9f05fd66e94c101db1564674d", size = 86077, upload-time = "2024-12-05T18:44:36.732Z" }, ] +[[package]] +name = "ujson" +version = "5.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f0/00/3110fd566786bfa542adb7932d62035e0c0ef662a8ff6544b6643b3d6fd7/ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1", size = 7154885, upload-time = "2024-05-14T02:02:34.233Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/91/91678e49a9194f527e60115db84368c237ac7824992224fac47dcb23a5c6/ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd", size = 55354, upload-time = "2024-05-14T02:00:27.054Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/1ed8c9b782fa4f44c26c1c4ec686d728a4865479da5712955daeef0b2e7b/ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf", size = 51808, upload-time = "2024-05-14T02:00:29.461Z" }, + { url = "https://files.pythonhosted.org/packages/51/bf/a3a38b2912288143e8e613c6c4c3f798b5e4e98c542deabf94c60237235f/ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6", size = 51995, upload-time = "2024-05-14T02:00:30.93Z" }, + { url = "https://files.pythonhosted.org/packages/b4/6d/0df8f7a6f1944ba619d93025ce468c9252aa10799d7140e07014dfc1a16c/ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569", size = 53566, upload-time = "2024-05-14T02:00:33.091Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ec/370741e5e30d5f7dc7f31a478d5bec7537ce6bfb7f85e72acefbe09aa2b2/ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770", size = 58499, upload-time = "2024-05-14T02:00:34.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/29/72b33a88f7fae3c398f9ba3e74dc2e5875989b25f1c1f75489c048a2cf4e/ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1", size = 997881, upload-time = "2024-05-14T02:00:36.492Z" }, + { url = "https://files.pythonhosted.org/packages/70/5c/808fbf21470e7045d56a282cf5e85a0450eacdb347d871d4eb404270ee17/ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5", size = 1140631, upload-time = "2024-05-14T02:00:38.995Z" }, + { url = "https://files.pythonhosted.org/packages/8f/6a/e1e8281408e6270d6ecf2375af14d9e2f41c402ab6b161ecfa87a9727777/ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51", size = 1043511, upload-time = "2024-05-14T02:00:41.352Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ca/e319acbe4863919ec62498bc1325309f5c14a3280318dca10fe1db3cb393/ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518", size = 38626, upload-time = "2024-05-14T02:00:43.483Z" }, + { url = "https://files.pythonhosted.org/packages/78/ec/dc96ca379de33f73b758d72e821ee4f129ccc32221f4eb3f089ff78d8370/ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f", size = 42076, upload-time = "2024-05-14T02:00:46.56Z" }, + { url = "https://files.pythonhosted.org/packages/23/ec/3c551ecfe048bcb3948725251fb0214b5844a12aa60bee08d78315bb1c39/ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00", size = 55353, upload-time = "2024-05-14T02:00:48.04Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9f/4731ef0671a0653e9f5ba18db7c4596d8ecbf80c7922dd5fe4150f1aea76/ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126", size = 51813, upload-time = "2024-05-14T02:00:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2b/44d6b9c1688330bf011f9abfdb08911a9dc74f76926dde74e718d87600da/ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8", size = 51988, upload-time = "2024-05-14T02:00:50.484Z" }, + { url = "https://files.pythonhosted.org/packages/29/45/f5f5667427c1ec3383478092a414063ddd0dfbebbcc533538fe37068a0a3/ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b", size = 53561, upload-time = "2024-05-14T02:00:52.146Z" }, + { url = "https://files.pythonhosted.org/packages/26/21/a0c265cda4dd225ec1be595f844661732c13560ad06378760036fc622587/ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9", size = 58497, upload-time = "2024-05-14T02:00:53.366Z" }, + { url = "https://files.pythonhosted.org/packages/28/36/8fde862094fd2342ccc427a6a8584fed294055fdee341661c78660f7aef3/ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f", size = 997877, upload-time = "2024-05-14T02:00:55.095Z" }, + { url = "https://files.pythonhosted.org/packages/90/37/9208e40d53baa6da9b6a1c719e0670c3f474c8fc7cc2f1e939ec21c1bc93/ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4", size = 1140632, upload-time = "2024-05-14T02:00:57.099Z" }, + { url = "https://files.pythonhosted.org/packages/89/d5/2626c87c59802863d44d19e35ad16b7e658e4ac190b0dead17ff25460b4c/ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1", size = 1043513, upload-time = "2024-05-14T02:00:58.488Z" }, + { url = "https://files.pythonhosted.org/packages/2f/ee/03662ce9b3f16855770f0d70f10f0978ba6210805aa310c4eebe66d36476/ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f", size = 38616, upload-time = "2024-05-14T02:01:00.463Z" }, + { url = "https://files.pythonhosted.org/packages/3e/20/952dbed5895835ea0b82e81a7be4ebb83f93b079d4d1ead93fcddb3075af/ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720", size = 42071, upload-time = "2024-05-14T02:01:02.211Z" }, + { url = "https://files.pythonhosted.org/packages/e8/a6/fd3f8bbd80842267e2d06c3583279555e8354c5986c952385199d57a5b6c/ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5", size = 55642, upload-time = "2024-05-14T02:01:04.055Z" }, + { url = "https://files.pythonhosted.org/packages/a8/47/dd03fd2b5ae727e16d5d18919b383959c6d269c7b948a380fdd879518640/ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e", size = 51807, upload-time = "2024-05-14T02:01:05.25Z" }, + { url = "https://files.pythonhosted.org/packages/25/23/079a4cc6fd7e2655a473ed9e776ddbb7144e27f04e8fc484a0fb45fe6f71/ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043", size = 51972, upload-time = "2024-05-14T02:01:06.458Z" }, + { url = "https://files.pythonhosted.org/packages/04/81/668707e5f2177791869b624be4c06fb2473bf97ee33296b18d1cf3092af7/ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1", size = 53686, upload-time = 
"2024-05-14T02:01:07.618Z" }, + { url = "https://files.pythonhosted.org/packages/bd/50/056d518a386d80aaf4505ccf3cee1c40d312a46901ed494d5711dd939bc3/ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3", size = 58591, upload-time = "2024-05-14T02:01:08.901Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d6/aeaf3e2d6fb1f4cfb6bf25f454d60490ed8146ddc0600fae44bfe7eb5a72/ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21", size = 997853, upload-time = "2024-05-14T02:01:10.772Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d5/1f2a5d2699f447f7d990334ca96e90065ea7f99b142ce96e85f26d7e78e2/ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2", size = 1140689, upload-time = "2024-05-14T02:01:12.214Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2c/6990f4ccb41ed93744aaaa3786394bca0875503f97690622f3cafc0adfde/ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e", size = 1043576, upload-time = "2024-05-14T02:01:14.39Z" }, + { url = "https://files.pythonhosted.org/packages/14/f5/a2368463dbb09fbdbf6a696062d0c0f62e4ae6fa65f38f829611da2e8fdd/ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e", size = 38764, upload-time = "2024-05-14T02:01:15.83Z" }, + { url = "https://files.pythonhosted.org/packages/59/2d/691f741ffd72b6c84438a93749ac57bf1a3f217ac4b0ea4fd0e96119e118/ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc", size = 42211, upload-time = "2024-05-14T02:01:17.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/69/b3e3f924bb0e8820bb46671979770c5be6a7d51c77a66324cdb09f1acddb/ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287", size = 55646, upload-time = "2024-05-14T02:01:19.26Z" }, + { url = "https://files.pythonhosted.org/packages/32/8a/9b748eb543c6cabc54ebeaa1f28035b1bd09c0800235b08e85990734c41e/ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e", size = 51806, upload-time = "2024-05-14T02:01:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/39/50/4b53ea234413b710a18b305f465b328e306ba9592e13a791a6a6b378869b/ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557", size = 51975, upload-time = "2024-05-14T02:01:21.904Z" }, + { url = "https://files.pythonhosted.org/packages/b4/9d/8061934f960cdb6dd55f0b3ceeff207fcc48c64f58b43403777ad5623d9e/ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988", size = 53693, upload-time = "2024-05-14T02:01:23.742Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/7bfa84b28519ddbb67efc8410765ca7da55e6b93aba84d97764cd5794dbc/ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816", size = 58594, upload-time = "2024-05-14T02:01:25.554Z" }, + { url = "https://files.pythonhosted.org/packages/48/eb/85d465abafb2c69d9699cfa5520e6e96561db787d36c677370e066c7e2e7/ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20", size = 997853, upload-time = "2024-05-14T02:01:27.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/76/2a63409fc05d34dd7d929357b7a45e3a2c96f22b4225cd74becd2ba6c4cb/ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0", size = 1140694, upload-time = "2024-05-14T02:01:29.113Z" }, + { url = "https://files.pythonhosted.org/packages/45/ed/582c4daba0f3e1688d923b5cb914ada1f9defa702df38a1916c899f7c4d1/ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f", size = 1043580, upload-time = "2024-05-14T02:01:31.447Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0c/9837fece153051e19c7bade9f88f9b409e026b9525927824cdf16293b43b/ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165", size = 38766, upload-time = "2024-05-14T02:01:32.856Z" }, + { url = "https://files.pythonhosted.org/packages/d7/72/6cb6728e2738c05bbe9bd522d6fc79f86b9a28402f38663e85a28fddd4a0/ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539", size = 42212, upload-time = "2024-05-14T02:01:33.97Z" }, + { url = "https://files.pythonhosted.org/packages/97/94/50ff2f1b61d668907f20216873640ab19e0eaa77b51e64ee893f6adfb266/ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b", size = 55421, upload-time = "2024-05-14T02:01:49.765Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/3d2ca621d8dbeaf6c5afd0725e1b4bbd465077acc69eff1e9302735d1432/ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27", size = 51816, upload-time = "2024-05-14T02:01:51.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/af/5dc103cb4d08f051f82d162a738adb9da488d1e3fafb9fd9290ea3eabf8e/ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76", size = 52023, upload-time = "2024-05-14T02:01:53.072Z" }, + { url = "https://files.pythonhosted.org/packages/5d/dd/b9a6027ba782b0072bf24a70929e15a58686668c32a37aebfcfaa9e00bdd/ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5", size = 53622, upload-time = "2024-05-14T02:01:54.738Z" }, + { url = "https://files.pythonhosted.org/packages/1f/28/bcf6df25c1a9f1989dc2ddc4ac8a80e246857e089f91a9079fd8a0a01459/ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0", size = 58563, upload-time = "2024-05-14T02:01:55.991Z" }, + { url = "https://files.pythonhosted.org/packages/9e/82/89404453a102d06d0937f6807c0a7ef2eec68b200b4ce4386127f3c28156/ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1", size = 998050, upload-time = "2024-05-14T02:01:57.8Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/2a4ea07165cad217bc842bb684b053bafa8ffdb818c47911c621e97a33fc/ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1", size = 1140672, upload-time = "2024-05-14T02:01:59.875Z" }, + { url = "https://files.pythonhosted.org/packages/72/53/d7bdf6afabeba3ed899f89d993c7f202481fa291d8c5be031c98a181eda4/ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996", size = 1043577, upload-time = "2024-05-14T02:02:02.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/b1/75f5f0d18501fd34487e46829de3070724c7b350f1983ba7f07e0986720b/ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9", size = 38654, upload-time = "2024-05-14T02:02:03.71Z" }, + { url = "https://files.pythonhosted.org/packages/77/0d/50d2f9238f6d6683ead5ecd32d83d53f093a3c0047ae4c720b6d586cb80d/ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a", size = 42134, upload-time = "2024-05-14T02:02:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/95/53/e5f5e733fc3525e65f36f533b0dbece5e5e2730b760e9beacf7e3d9d8b26/ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64", size = 51846, upload-time = "2024-05-14T02:02:06.347Z" }, + { url = "https://files.pythonhosted.org/packages/59/1f/f7bc02a54ea7b47f3dc2d125a106408f18b0f47b14fc737f0913483ae82b/ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3", size = 48103, upload-time = "2024-05-14T02:02:07.777Z" }, + { url = "https://files.pythonhosted.org/packages/1a/3a/d3921b6f29bc744d8d6c56db5f8bbcbe55115fd0f2b79c3c43ff292cc7c9/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a", size = 47257, upload-time = "2024-05-14T02:02:09.46Z" }, + { url = "https://files.pythonhosted.org/packages/f1/04/f4e3883204b786717038064afd537389ba7d31a72b437c1372297cb651ea/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746", size = 48468, upload-time = "2024-05-14T02:02:10.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/cd/9c6547169eb01a22b04cbb638804ccaeb3c2ec2afc12303464e0f9b2ee5a/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88", size = 54266, upload-time = "2024-05-14T02:02:12.109Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/ecd14d3cf6127f8a990b01f0ad20e257f5619a555f47d707c57d39934894/ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b", size = 42224, upload-time = "2024-05-14T02:02:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/a3a2356ca5a4b67fe32a0c31e49226114d5154ba2464bb1220a93eb383e8/ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4", size = 51855, upload-time = "2024-05-14T02:02:22.164Z" }, + { url = "https://files.pythonhosted.org/packages/73/3d/41e78e7500e75eb6b5a7ab06907a6df35603b92ac6f939b86f40e9fe2c06/ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8", size = 48059, upload-time = "2024-05-14T02:02:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/be/14/e435cbe5b5189483adbba5fe328e88418ccd54b2b1f74baa4172384bb5cd/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b", size = 47238, upload-time = "2024-05-14T02:02:24.873Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d9/b6f4d1e6bec20a3b582b48f64eaa25209fd70dc2892b21656b273bc23434/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804", size = 48457, upload-time = "2024-05-14T02:02:26.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/1c/cfefabb5996e21a1a4348852df7eb7cfc69299143739e86e5b1071c78735/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e", size = 54238, upload-time = "2024-05-14T02:02:28.468Z" }, + { url = "https://files.pythonhosted.org/packages/af/c4/fa70e77e1c27bbaf682d790bd09ef40e86807ada704c528ef3ea3418d439/ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7", size = 42230, upload-time = "2024-05-14T02:02:29.678Z" }, +] + [[package]] name = "urllib3" version = "2.5.0" @@ -3224,6 +4957,215 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, ] +[[package]] +name = "xxhash" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241, upload-time = "2024-08-17T09:20:38.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/8a/0e9feca390d512d293afd844d31670e25608c4a901e10202aa98785eab09/xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212", size = 31970, upload-time = "2024-08-17T09:17:35.675Z" }, + { url = "https://files.pythonhosted.org/packages/16/e6/be5aa49580cd064a18200ab78e29b88b1127e1a8c7955eb8ecf81f2626eb/xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520", 
size = 30801, upload-time = "2024-08-17T09:17:37.353Z" }, + { url = "https://files.pythonhosted.org/packages/20/ee/b8a99ebbc6d1113b3a3f09e747fa318c3cde5b04bd9c197688fadf0eeae8/xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680", size = 220927, upload-time = "2024-08-17T09:17:38.835Z" }, + { url = "https://files.pythonhosted.org/packages/58/62/15d10582ef159283a5c2b47f6d799fc3303fe3911d5bb0bcc820e1ef7ff4/xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da", size = 200360, upload-time = "2024-08-17T09:17:40.851Z" }, + { url = "https://files.pythonhosted.org/packages/23/41/61202663ea9b1bd8e53673b8ec9e2619989353dba8cfb68e59a9cbd9ffe3/xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23", size = 428528, upload-time = "2024-08-17T09:17:42.545Z" }, + { url = "https://files.pythonhosted.org/packages/f2/07/d9a3059f702dec5b3b703737afb6dda32f304f6e9da181a229dafd052c29/xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196", size = 194149, upload-time = "2024-08-17T09:17:44.361Z" }, + { url = "https://files.pythonhosted.org/packages/eb/58/27caadf78226ecf1d62dbd0c01d152ed381c14c1ee4ad01f0d460fc40eac/xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c", size = 207703, upload-time = "2024-08-17T09:17:46.656Z" }, + { url = "https://files.pythonhosted.org/packages/b1/08/32d558ce23e1e068453c39aed7b3c1cdc690c177873ec0ca3a90d5808765/xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482", size = 216255, upload-time = "2024-08-17T09:17:48.031Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d4/2b971e2d2b0a61045f842b622ef11e94096cf1f12cd448b6fd426e80e0e2/xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296", size = 202744, upload-time = "2024-08-17T09:17:50.045Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/6a6438864a8c4c39915d7b65effd85392ebe22710412902487e51769146d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415", size = 210115, upload-time = "2024-08-17T09:17:51.834Z" }, + { url = "https://files.pythonhosted.org/packages/48/7d/b3c27c27d1fc868094d02fe4498ccce8cec9fcc591825c01d6bcb0b4fc49/xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198", size = 414247, upload-time = "2024-08-17T09:17:53.094Z" }, + { url = "https://files.pythonhosted.org/packages/a1/05/918f9e7d2fbbd334b829997045d341d6239b563c44e683b9a7ef8fe50f5d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442", size = 191419, upload-time = "2024-08-17T09:17:54.906Z" }, + { url = "https://files.pythonhosted.org/packages/08/29/dfe393805b2f86bfc47c290b275f0b7c189dc2f4e136fd4754f32eb18a8d/xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da", size = 30114, upload-time = "2024-08-17T09:17:56.566Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d7/aa0b22c4ebb7c3ccb993d4c565132abc641cd11164f8952d89eb6a501909/xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9", size = 30003, upload-time = "2024-08-17T09:17:57.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/12/f969b81541ee91b55f1ce469d7ab55079593c80d04fd01691b550e535000/xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6", size = 26773, upload-time = "2024-08-17T09:17:59.169Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/afed0f131fbda960ff15eee7f304fa0eeb2d58770fade99897984852ef23/xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1", size = 31969, upload-time = "2024-08-17T09:18:00.852Z" }, + { url = "https://files.pythonhosted.org/packages/8c/0c/7c3bc6d87e5235672fcc2fb42fd5ad79fe1033925f71bf549ee068c7d1ca/xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8", size = 30800, upload-time = "2024-08-17T09:18:01.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/9e/01067981d98069eec1c20201f8c145367698e9056f8bc295346e4ea32dd1/xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166", size = 221566, upload-time = "2024-08-17T09:18:03.461Z" }, + { url = "https://files.pythonhosted.org/packages/d4/09/d4996de4059c3ce5342b6e1e6a77c9d6c91acce31f6ed979891872dd162b/xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7", size = 201214, upload-time = "2024-08-17T09:18:05.616Z" }, + { url = "https://files.pythonhosted.org/packages/62/f5/6d2dc9f8d55a7ce0f5e7bfef916e67536f01b85d32a9fbf137d4cadbee38/xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623", size = 429433, upload-time = "2024-08-17T09:18:06.957Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/72/9256303f10e41ab004799a4aa74b80b3c5977d6383ae4550548b24bd1971/xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a", size = 194822, upload-time = "2024-08-17T09:18:08.331Z" }, + { url = "https://files.pythonhosted.org/packages/34/92/1a3a29acd08248a34b0e6a94f4e0ed9b8379a4ff471f1668e4dce7bdbaa8/xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88", size = 208538, upload-time = "2024-08-17T09:18:10.332Z" }, + { url = "https://files.pythonhosted.org/packages/53/ad/7fa1a109663366de42f724a1cdb8e796a260dbac45047bce153bc1e18abf/xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c", size = 216953, upload-time = "2024-08-17T09:18:11.707Z" }, + { url = "https://files.pythonhosted.org/packages/35/02/137300e24203bf2b2a49b48ce898ecce6fd01789c0fcd9c686c0a002d129/xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2", size = 203594, upload-time = "2024-08-17T09:18:13.799Z" }, + { url = "https://files.pythonhosted.org/packages/23/03/aeceb273933d7eee248c4322b98b8e971f06cc3880e5f7602c94e5578af5/xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084", size = 210971, upload-time = "2024-08-17T09:18:15.824Z" }, + { url = "https://files.pythonhosted.org/packages/e3/64/ed82ec09489474cbb35c716b189ddc1521d8b3de12b1b5ab41ce7f70253c/xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d", size = 415050, upload-time = "2024-08-17T09:18:17.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/43/6db4c02dcb488ad4e03bc86d70506c3d40a384ee73c9b5c93338eb1f3c23/xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839", size = 192216, upload-time = "2024-08-17T09:18:18.779Z" }, + { url = "https://files.pythonhosted.org/packages/22/6d/db4abec29e7a567455344433d095fdb39c97db6955bb4a2c432e486b4d28/xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da", size = 30120, upload-time = "2024-08-17T09:18:20.009Z" }, + { url = "https://files.pythonhosted.org/packages/52/1c/fa3b61c0cf03e1da4767213672efe186b1dfa4fc901a4a694fb184a513d1/xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58", size = 30003, upload-time = "2024-08-17T09:18:21.052Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8e/9e6fc572acf6e1cc7ccb01973c213f895cb8668a9d4c2b58a99350da14b7/xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3", size = 26777, upload-time = "2024-08-17T09:18:22.809Z" }, + { url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00", size = 31969, upload-time = "2024-08-17T09:18:24.025Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9", size = 30787, upload-time = "2024-08-17T09:18:25.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84", size = 220959, upload-time = "2024-08-17T09:18:26.518Z" }, + { url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793", size = 200006, upload-time = "2024-08-17T09:18:27.905Z" }, + { url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be", size = 428326, upload-time = "2024-08-17T09:18:29.335Z" }, + { url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6", size = 194380, upload-time = "2024-08-17T09:18:30.706Z" }, + { url = "https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90", size = 207934, upload-time = "2024-08-17T09:18:32.133Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27", size = 216301, upload-time = "2024-08-17T09:18:33.474Z" }, 
+ { url = "https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2", size = 203351, upload-time = "2024-08-17T09:18:34.889Z" }, + { url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d", size = 210294, upload-time = "2024-08-17T09:18:36.355Z" }, + { url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab", size = 414674, upload-time = "2024-08-17T09:18:38.536Z" }, + { url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e", size = 192022, upload-time = "2024-08-17T09:18:40.138Z" }, + { url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8", size = 30170, upload-time = "2024-08-17T09:18:42.163Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e", size = 30040, upload-time = "2024-08-17T09:18:43.699Z" }, + { url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash 
= "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2", size = 26796, upload-time = "2024-08-17T09:18:45.29Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795, upload-time = "2024-08-17T09:18:46.813Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792, upload-time = "2024-08-17T09:18:47.862Z" }, + { url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc", size = 220950, upload-time = "2024-08-17T09:18:49.06Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3", size = 199980, upload-time = "2024-08-17T09:18:50.445Z" }, + { url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c", size = 428324, upload-time = "2024-08-17T09:18:51.988Z" }, + { url = "https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb", size = 194370, upload-time = "2024-08-17T09:18:54.164Z" }, + { url = "https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f", size = 207911, upload-time = "2024-08-17T09:18:55.509Z" }, + { url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7", size = 216352, upload-time = "2024-08-17T09:18:57.073Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326", size = 203410, upload-time = "2024-08-17T09:18:58.54Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf", size = 210322, upload-time = "2024-08-17T09:18:59.943Z" }, + { url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7", size = 414725, upload-time = "2024-08-17T09:19:01.332Z" }, + { url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c", size = 
192070, upload-time = "2024-08-17T09:19:03.007Z" }, + { url = "https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637", size = 30172, upload-time = "2024-08-17T09:19:04.355Z" }, + { url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43", size = 30041, upload-time = "2024-08-17T09:19:05.435Z" }, + { url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b", size = 26801, upload-time = "2024-08-17T09:19:06.547Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f6/531dd6858adf8877675270b9d6989b6dacfd1c2d7135b17584fc29866df3/xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301", size = 31971, upload-time = "2024-08-17T09:19:47.447Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a8/b2a42b6c9ae46e233f474f3d307c2e7bca8d9817650babeca048d2ad01d6/xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab", size = 30801, upload-time = "2024-08-17T09:19:48.911Z" }, + { url = "https://files.pythonhosted.org/packages/b4/92/9ac297e3487818f429bcf369c1c6a097edf5b56ed6fc1feff4c1882e87ef/xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f", size = 220644, upload-time = "2024-08-17T09:19:51.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/48/c1426dd3c86fc4a52f983301867463472f6a9013fb32d15991e60c9919b6/xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd", size = 200021, upload-time = "2024-08-17T09:19:52.923Z" }, + { url = "https://files.pythonhosted.org/packages/f3/de/0ab8c79993765c94fc0d0c1a22b454483c58a0161e1b562f58b654f47660/xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc", size = 428217, upload-time = "2024-08-17T09:19:54.349Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b4/332647451ed7d2c021294b7c1e9c144dbb5586b1fb214ad4f5a404642835/xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754", size = 193868, upload-time = "2024-08-17T09:19:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1c/a42c0a6cac752f84f7b44a90d1a9fa9047cf70bdba5198a304fde7cc471f/xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6", size = 207403, upload-time = "2024-08-17T09:19:57.945Z" }, + { url = "https://files.pythonhosted.org/packages/c4/d7/04e1b0daae9dc9b02c73c1664cc8aa527498c3f66ccbc586eeb25bbe9f14/xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898", size = 215978, upload-time = "2024-08-17T09:19:59.381Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/05e15e67505228fc19ee98a79e427b3a0b9695f5567cd66ced5d66389883/xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833", size = 202416, upload-time = "2024-08-17T09:20:01.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/fb/e9028d3645bba5412a09de13ee36df276a567e60bdb31d499dafa46d76ae/xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6", size = 209853, upload-time = "2024-08-17T09:20:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/02/2c/18c6a622429368274739372d2f86c8125413ec169025c7d8ffb051784bba/xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af", size = 413926, upload-time = "2024-08-17T09:20:04.946Z" }, + { url = "https://files.pythonhosted.org/packages/72/bb/5b55c391084a0321c3809632a018b9b657e59d5966289664f85a645942ac/xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606", size = 191156, upload-time = "2024-08-17T09:20:06.318Z" }, + { url = "https://files.pythonhosted.org/packages/86/2b/915049db13401792fec159f57e4f4a5ca7a9768e83ef71d6645b9d0cd749/xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4", size = 30122, upload-time = "2024-08-17T09:20:07.691Z" }, + { url = "https://files.pythonhosted.org/packages/d5/87/382ef7b24917d7cf4c540ee30f29b283bc87ac5893d2f89b23ea3cdf7d77/xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558", size = 30021, upload-time = "2024-08-17T09:20:08.832Z" }, + { url = "https://files.pythonhosted.org/packages/e2/47/d06b24e2d9c3dcabccfd734d11b5bbebfdf59ceac2c61509d8205dd20ac6/xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e", size = 26780, upload-time = "2024-08-17T09:20:09.989Z" }, + { url = "https://files.pythonhosted.org/packages/ab/9a/233606bada5bd6f50b2b72c45de3d9868ad551e83893d2ac86dc7bb8553a/xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c", size = 29732, upload-time = "2024-08-17T09:20:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/0c/67/f75276ca39e2c6604e3bee6c84e9db8a56a4973fde9bf35989787cf6e8aa/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986", size = 36214, upload-time = "2024-08-17T09:20:12.335Z" }, + { url = "https://files.pythonhosted.org/packages/0f/f8/f6c61fd794229cc3848d144f73754a0c107854372d7261419dcbbd286299/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6", size = 32020, upload-time = "2024-08-17T09:20:13.537Z" }, + { url = "https://files.pythonhosted.org/packages/79/d3/c029c99801526f859e6b38d34ab87c08993bf3dcea34b11275775001638a/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b", size = 40515, upload-time = "2024-08-17T09:20:14.669Z" }, + { url = "https://files.pythonhosted.org/packages/62/e3/bef7b82c1997579c94de9ac5ea7626d01ae5858aa22bf4fcb38bf220cb3e/xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da", size = 30064, upload-time = "2024-08-17T09:20:15.925Z" }, + { url = "https://files.pythonhosted.org/packages/c2/56/30d3df421814947f9d782b20c9b7e5e957f3791cbd89874578011daafcbd/xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9", size = 29734, upload-time = "2024-08-17T09:20:30.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/dd/3c42a1f022ad0d82c852d3cb65493ebac03dcfa8c994465a5fb052b00e3c/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1", size = 36216, upload-time = "2024-08-17T09:20:32.116Z" }, + { url = "https://files.pythonhosted.org/packages/b2/40/8f902ab3bebda228a9b4de69eba988280285a7f7f167b942bc20bb562df9/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f", size = 32042, upload-time = "2024-08-17T09:20:33.562Z" }, + { url = "https://files.pythonhosted.org/packages/db/87/bd06beb8ccaa0e9e577c9b909a49cfa5c5cd2ca46034342d72dd9ce5bc56/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0", size = 40516, upload-time = "2024-08-17T09:20:36.004Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f8/505385e2fbd753ddcaafd5550eabe86f6232cbebabad3b2508d411b19153/xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240", size = 30108, upload-time = "2024-08-17T09:20:37.214Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, + { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, + { url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, + { url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, + { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, + { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, + { url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, + { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = "2025-06-10T00:42:59.055Z" }, + { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, + { url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, 
upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, 
upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = 
"2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/01/75/0d37402d208d025afa6b5b8eb80e466d267d3fd1927db8e317d29a94a4cb/yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash 
= "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3", size = 134259, upload-time = "2025-06-10T00:45:29.882Z" }, + { url = "https://files.pythonhosted.org/packages/73/84/1fb6c85ae0cf9901046f07d0ac9eb162f7ce6d95db541130aa542ed377e6/yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b", size = 91269, upload-time = "2025-06-10T00:45:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/f3/9c/eae746b24c4ea29a5accba9a06c197a70fa38a49c7df244e0d3951108861/yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983", size = 89995, upload-time = "2025-06-10T00:45:35.066Z" }, + { url = "https://files.pythonhosted.org/packages/fb/30/693e71003ec4bc1daf2e4cf7c478c417d0985e0a8e8f00b2230d517876fc/yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805", size = 325253, upload-time = "2025-06-10T00:45:37.052Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a2/5264dbebf90763139aeb0b0b3154763239398400f754ae19a0518b654117/yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba", size = 320897, upload-time = "2025-06-10T00:45:39.962Z" }, + { url = "https://files.pythonhosted.org/packages/e7/17/77c7a89b3c05856489777e922f41db79ab4faf58621886df40d812c7facd/yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e", size = 340696, upload-time = "2025-06-10T00:45:41.915Z" }, + { url = "https://files.pythonhosted.org/packages/6d/55/28409330b8ef5f2f681f5b478150496ec9cf3309b149dab7ec8ab5cfa3f0/yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723", size = 335064, upload-time = "2025-06-10T00:45:43.893Z" }, + { url = "https://files.pythonhosted.org/packages/85/58/cb0257cbd4002828ff735f44d3c5b6966c4fd1fc8cc1cd3cd8a143fbc513/yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000", size = 327256, upload-time = "2025-06-10T00:45:46.393Z" }, + { url = "https://files.pythonhosted.org/packages/53/f6/c77960370cfa46f6fb3d6a5a79a49d3abfdb9ef92556badc2dcd2748bc2a/yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5", size = 316389, upload-time = "2025-06-10T00:45:48.358Z" }, + { url = "https://files.pythonhosted.org/packages/64/ab/be0b10b8e029553c10905b6b00c64ecad3ebc8ace44b02293a62579343f6/yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c", size = 340481, upload-time = "2025-06-10T00:45:50.663Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c3/3f327bd3905a4916029bf5feb7f86dcf864c7704f099715f62155fb386b2/yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240", size = 336941, upload-time = "2025-06-10T00:45:52.554Z" }, + { url = "https://files.pythonhosted.org/packages/d1/42/040bdd5d3b3bb02b4a6ace4ed4075e02f85df964d6e6cb321795d2a6496a/yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee", size = 339936, upload-time = "2025-06-10T00:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1c/911867b8e8c7463b84dfdc275e0d99b04b66ad5132b503f184fe76be8ea4/yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010", 
size = 360163, upload-time = "2025-06-10T00:45:56.87Z" }, + { url = "https://files.pythonhosted.org/packages/e2/31/8c389f6c6ca0379b57b2da87f1f126c834777b4931c5ee8427dd65d0ff6b/yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8", size = 359108, upload-time = "2025-06-10T00:45:58.869Z" }, + { url = "https://files.pythonhosted.org/packages/7f/09/ae4a649fb3964324c70a3e2b61f45e566d9ffc0affd2b974cbf628957673/yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d", size = 351875, upload-time = "2025-06-10T00:46:01.45Z" }, + { url = "https://files.pythonhosted.org/packages/8d/43/bbb4ed4c34d5bb62b48bf957f68cd43f736f79059d4f85225ab1ef80f4b9/yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06", size = 82293, upload-time = "2025-06-10T00:46:03.763Z" }, + { url = "https://files.pythonhosted.org/packages/d7/cd/ce185848a7dba68ea69e932674b5c1a42a1852123584bccc5443120f857c/yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00", size = 87385, upload-time = "2025-06-10T00:46:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] + [[package]] name = "zipp" version = "3.23.0" From b482f407a7fa08e77c955d77dbbf64d6441ddf70 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Fri, 22 Aug 2025 02:45:19 +0000 Subject: [PATCH 08/20] validation working --- pyproject.toml | 1 + scratch/scripts/benchmark_commits.py | 49 ++-- scratch/scripts/download_dataset.py | 2 +- scratch/scripts/filter_commits.py | 17 +- scratch/scripts/scrape_repositories.py | 2 +- 
scratch/scripts/validate_containers.py | 167 ++++++++---- src/datasmith/detection/detect_breakpoints.py | 2 +- src/datasmith/docker/Dockerfile | 3 +- src/datasmith/docker/METHOD.md | 24 ++ src/datasmith/docker/context.py | 70 +++++- src/datasmith/docker/context_registry.py | 238 +++++++++++++----- src/datasmith/docker/docker_build.sh | 5 - src/datasmith/docker/entrypoint.sh | 5 +- src/datasmith/docker/orchestrator.py | 76 +++++- src/datasmith/execution/utils.py | 35 +-- src/datasmith/scrape/code_coverage.py | 2 +- src/datasmith/scrape/filter_dashboards.py | 2 +- src/datasmith/scrape/utils.py | 8 +- src/datasmith/utils.py | 6 +- 19 files changed, 538 insertions(+), 176 deletions(-) create mode 100644 src/datasmith/docker/METHOD.md diff --git a/pyproject.toml b/pyproject.toml index 4e6984d..719867f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -125,6 +125,7 @@ ignore = [ [tool.ruff.lint.per-file-ignores] "tests/*" = ["S101"] +"*.py" = ["TRY003"] [tool.ruff.format] preview = true diff --git a/scratch/scripts/benchmark_commits.py b/scratch/scripts/benchmark_commits.py index ae4bd3a..5594575 100644 --- a/scratch/scripts/benchmark_commits.py +++ b/scratch/scripts/benchmark_commits.py @@ -5,6 +5,7 @@ import logging import math import os +import pickle from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path @@ -17,9 +18,8 @@ orchestrate, ) from datasmith.logging_config import configure_logging -from datasmith.scrape.utils import _parse_commit_url -# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log").absolute(), "a")) +# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w")) logger = configure_logging(level=logging.DEBUG) @@ -72,12 +72,31 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() +def process_commits(commits_pth: Path) -> list[tuple[str, str, str]]: + commits = pd.read_json(commits_pth, lines=True) + all_states = {} + 
for _, row in commits.iterrows(): + repo_name = row["repo_name"] + sha = row["commit_sha"] + has_asv = row.get("has_asv", True) + if not has_asv and "scikit-learn" not in repo_name: + logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + continue + owner, repo = repo_name.split("/") + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {(sha)} + else: + all_states[(owner, repo)].add(sha) + + all_states_list = [(owner, repo, sha) for (owner, repo), shas in all_states.items() for sha in shas] + + return all_states_list + + def main() -> None: args = parse_args() - commits = pd.read_json(args.filtered_commits, lines=True) - commits["repo_name"] = commits["repo_name"].str.lower() - commit_urls = ("https://www.github.com/" + commits["repo_name"] + "/commit/" + commits["commit_sha"]).tolist() + all_states = process_commits(args.filtered_commits) max_concurrency = ( args.max_concurrency if args.max_concurrency != -1 else max(4, math.floor(0.5 * (os.cpu_count() or 1))) @@ -99,14 +118,6 @@ def main() -> None: client = get_docker_client() # Ensure all required Docker images are available - all_states = {} - for owner, repo, sha in map(_parse_commit_url, commit_urls): - if (owner, repo) not in all_states: - all_states[(owner, repo)] = {sha} - else: - all_states[(owner, repo)].add(sha) - - all_states = list(set(map(_parse_commit_url, commit_urls))) docker_image_names = [] with ThreadPoolExecutor(max_workers=args.num_cores * 4) as pool: @@ -119,7 +130,7 @@ def main() -> None: machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] machine_args["num_cpu"] = str(args.num_cores) - asyncio.run( + files_by_image: dict[str, dict[str, str]] = asyncio.run( orchestrate( docker_image_names=docker_image_names, asv_args=asv_args, @@ -130,6 +141,16 @@ def main() -> None: client=client, ) ) + # save the files by image as a pickle file. 
+ with open(output_dir / "files_by_image.pkl", "wb") as f: + pickle.dump(files_by_image, f) + + # save the files by image as a JSON file + output_file = output_dir / "benchmark_results.json" + with open(output_file, "w") as f: + pd.DataFrame.from_dict(files_by_image, orient="index").to_json(f, orient="records", lines=True) + + logger.info("Benchmark results saved to %s", output_file) if __name__ == "__main__": diff --git a/scratch/scripts/download_dataset.py b/scratch/scripts/download_dataset.py index 36ddee8..77bd5f0 100644 --- a/scratch/scripts/download_dataset.py +++ b/scratch/scripts/download_dataset.py @@ -36,7 +36,7 @@ def process_dashboard(row, force: bool) -> tuple[Path, BenchmarkCollection]: base_url=row["url"], html_dir=row["output_dir"], force=force ) if dashboard_collection is None: - raise ValueError(f"Failed to create benchmark collection from {row['url']}") # noqa: TRY003 + raise ValueError(f"Failed to create benchmark collection from {row['url']}") dashboard_collection.save(path=out_path) return out_path, dashboard_collection diff --git a/scratch/scripts/filter_commits.py b/scratch/scripts/filter_commits.py index 11a8d5d..e0a638c 100644 --- a/scratch/scripts/filter_commits.py +++ b/scratch/scripts/filter_commits.py @@ -4,7 +4,7 @@ import json import re import tempfile -from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor +from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, as_completed from pathlib import Path import pandas as pd @@ -114,8 +114,8 @@ def main() -> None: # download all repos to a temp dir with tempfile.TemporaryDirectory(prefix="gh-repos-") as td: - all_repos = {} - for repo_name in tqdm(all_repo_names, desc="Cloning repos"): + + def clone_repo(repo_name: str) -> tuple[str, Repo]: repo_name = repo_name.strip("/") owner, name = repo_name.split("/", 1) path = Path(td) / f"{owner}__{name}.git" @@ -123,14 +123,19 @@ def main() -> None: f"https://github.com/{repo_name}.git", path, bare=True, - # 
multi_options=["--filter=tree:0"], - multi_options=["--filter=blob:none"], quiet=True, allow_unsafe_options=True, allow_unsafe_protocols=True, ) logger.debug("Cloned repo %s to %s", repo_name, path) - all_repos[repo_name] = repo + return repo_name, repo + + all_repos = {} + with ThreadPoolExecutor(max_workers=args.threads) as tp: + futures = {tp.submit(clone_repo, repo_name): repo_name for repo_name in all_repo_names} + for f in tqdm(as_completed(futures), total=len(futures), desc="Cloning repos"): + repo_name, repo = f.result() + all_repos[repo_name] = repo commit_info_args: list[tuple[Repo, str]] = [] for repo_name, commit_sha in commits[["repo_name", "commit_sha"]].itertuples(index=False, name=None): diff --git a/scratch/scripts/scrape_repositories.py b/scratch/scripts/scrape_repositories.py index 444c30c..f62d39d 100644 --- a/scratch/scripts/scrape_repositories.py +++ b/scratch/scripts/scrape_repositories.py @@ -86,7 +86,7 @@ def main() -> None: filtered_df = filtered_df[filtered_df.repo_name != "airspeed-velocity/asv"] filtered_df = filtered_df[filtered_df.stars >= args.min_stars] if filtered_df.empty: - raise ValueError("No dashboards found in the repositories.") # noqa: TRY003 + raise ValueError("No dashboards found in the repositories.") filtered_df.to_csv(args.filtered_outfile, index=False) logger.info("✅ Filtered dashboards saved to %s", args.filtered_outfile) diff --git a/scratch/scripts/validate_containers.py b/scratch/scripts/validate_containers.py index 6621968..b190fc6 100644 --- a/scratch/scripts/validate_containers.py +++ b/scratch/scripts/validate_containers.py @@ -3,16 +3,19 @@ """ import argparse +import json import logging from pathlib import Path +import asv +import pandas as pd + from datasmith.benchmark.collection import BenchmarkCollection from datasmith.docker.context_registry import CONTEXT_REGISTRY -from datasmith.docker.orchestrator import get_docker_client +from datasmith.docker.orchestrator import get_docker_client, 
log_container_output from datasmith.logging_config import configure_logging from datasmith.scrape.utils import _parse_commit_url -# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "a")) logger = configure_logging(level=logging.DEBUG) @@ -25,8 +28,12 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--dashboard", type=Path, - required=True, - help="Path to the dashboard containing the benchmarks.", + help="Path to the dashboard containing the benchmarks. Either --dashboard or --commits must be provided.", + ) + parser.add_argument( + "--commits", + type=Path, + help="Path to a JSONL file containing commit information. Either --dashboard or --commits must be provided.", ) parser.add_argument( "--docker-dir", @@ -43,52 +50,128 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() -def main(args: argparse.Namespace) -> None: - dashboard = BenchmarkCollection.load(args.dashboard) - all_states = {} - for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): - if (owner, repo) not in all_states: - all_states[(owner, repo)] = {sha} - else: - all_states[(owner, repo)].add(sha) +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: + if args.dashboard: + dashboard = BenchmarkCollection.load(args.dashboard) + all_states = {} + for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {sha} + else: + all_states[(owner, repo)].add(sha) + elif args.commits: + commits = pd.read_json(args.commits, lines=True) + all_states = {} + for _, row in commits.iterrows(): + repo_name = row["repo_name"] + sha = row["commit_sha"] + has_asv = row.get("has_asv", True) + if not has_asv and "scikit-learn" not in repo_name: + logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + continue + owner, repo = repo_name.split("/") + if (owner, 
repo) not in all_states: + all_states[(owner, repo)] = {(sha)} + else: + all_states[(owner, repo)].add(sha) + all_states.pop(("scikit-learn", "scikit-learn")) # already validated. + else: + raise ValueError("Either --dashboard or --commits must be provided.") + return all_states + +def main(args: argparse.Namespace) -> None: client = get_docker_client() + all_states = process_inputs(args) + + machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] + all_files_by_image = {} + errors = [] + error_fmt = ( + "$ docker build -t {image_name} src/datasmith/docker/ --build-arg REPO_URL={repo_url} --build-arg COMMIT_SHA={commit_sha}" + + "\n$ docker run --rm -v $(pwd)/output:/output {image_name} asv run --quick --python=same --set-commit-hash={commit_sha}" + ) for (owner, repo), uniq_shas in all_states.items(): - for sha in uniq_shas: - image_name = f"asv-{owner}-{repo}-{sha}" + print("SMALL SCALE TESTING", owner, repo, len(uniq_shas), "ONLY 5") + for sha in list(uniq_shas)[:5]: + image_name = f"asv-{owner}-{repo}-{sha}".lower() docker_ctx = CONTEXT_REGISTRY[image_name] - docker_ctx.build_container( - client=client, - image_name=image_name, - build_args={ - "REPO_URL": f"https://www.github.com/{owner}/{repo}", - "COMMIT_SHA": sha, - }, - force=True, - ) - logger.debug(f"Validating {image_name} for commit {sha}") - # stop any existing container with the same name - container = client.containers.run( - image=image_name, - detach=True, - remove=True, - name=f"asv-{owner}-{repo}-{sha}-validation", - environment={"ASV_ARGS": "--quick --python=same"}, - volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, - ) - for line in container.logs(stream=True, follow=True): - logger.info(line.decode().strip()) - - result = container.wait() - if result.get("StatusCode", 1) != 0: - logger.error( - f"Container {image_name} for commit {sha} failed with status code {result.get('StatusCode', 1)}" + 
try: + docker_ctx.build_container( + client=client, + image_name=image_name, + build_args={ + "REPO_URL": f"https://www.github.com/{owner}/{repo}", + "COMMIT_SHA": sha, + }, + force=True, ) - else: - logger.info(f"Container {image_name} for commit {sha} completed successfully.") + logger.debug(f"Validating {image_name} for commit {sha}") + # stop any existing container with the same name + machine_args["machine"] = sha + container = client.containers.run( + image=image_name, + detach=True, + name=f"asv-{owner}-{repo}-{sha}-validation", + environment={ + "ASV_ARGS": f"--quick --python=same --set-commit-hash={sha}", + "ASV_MACHINE_ARGS": " ".join([f"--{k} '{v}'" for k, v in machine_args.items()]), + }, + volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, + ) + for line in container.logs(stream=True, follow=True): + logger.info(line.decode().strip()) + + result = container.wait() + if result.get("StatusCode", 1) != 0: + logger.error( + f"Container {image_name} for commit {sha} failed with status code {result.get('StatusCode', 1)}" + ) + errors.append( + error_fmt.format( + image_name=image_name, + repo_url=f"https://www.github.com/{owner}/{repo}", + commit_sha=sha, + ) + ) + files = log_container_output(container, archive="/output") + else: + logger.info(f"Container {image_name} for commit {sha} completed successfully.") + files = log_container_output(container, archive="/output") + print(f"{image_name} completed successfully") + all_files_by_image[image_name] = files + except Exception: + logger.exception(f"Error validating {image_name} for commit {sha}") + errors.append( + error_fmt.format( + image_name=image_name, + repo_url=f"https://www.github.com/{owner}/{repo}", + commit_sha=sha, + ) + ) + continue logger.info("All containers validated successfully.") + # save errors to a file + if errors: + with open(args.output_dir / "errors.txt", "w") as f: + for error in errors: + f.write(f"{error}\n") + logger.error(f"Errors 
occurred during validation. See {args.output_dir / 'errors.txt'} for details.") + else: + logger.info("No errors occurred during validation.") + # remove all containers + for container in client.containers.list(all=True): + if container.name.startswith("asv-"): + logger.info(f"Removing container {container.name}") + container.remove(force=True) + + # save all-files as a json file + with open(args.output_dir / "all_files_by_image.json", "w") as f: + json.dump(all_files_by_image, f, indent=4) + + logger.info("Results saved to %s", args.output_dir / "all_files_by_image.json") if __name__ == "__main__": diff --git a/src/datasmith/detection/detect_breakpoints.py b/src/datasmith/detection/detect_breakpoints.py index f509ce8..f9ee149 100644 --- a/src/datasmith/detection/detect_breakpoints.py +++ b/src/datasmith/detection/detect_breakpoints.py @@ -80,7 +80,7 @@ def get_detection_method(method: str) -> typing.Callable: elif method == "rbf": return get_breakpoints else: - raise ValueError(f"Unknown method: {method}. Use 'asv' or 'rbf'.") # noqa: TRY003 + raise ValueError(f"Unknown method: {method}. 
Use 'asv' or 'rbf'.") def detect_all_breakpoints(summary_df: pd.DataFrame, method: str = "rbf") -> pd.DataFrame: diff --git a/src/datasmith/docker/Dockerfile b/src/datasmith/docker/Dockerfile index d954f02..6301d1a 100644 --- a/src/datasmith/docker/Dockerfile +++ b/src/datasmith/docker/Dockerfile @@ -4,7 +4,7 @@ ARG REPO_URL ARG COMMIT_SHA RUN apt-get update && \ apt-get install -y --no-install-recommends \ - curl git build-essential jq && \ + curl git build-essential jq cmake ninja-build && \ rm -rf /var/lib/apt/lists/* RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \ @@ -30,6 +30,7 @@ RUN chmod +x /entrypoint.sh RUN git clone ${REPO_URL} /workspace/repo WORKDIR /workspace/repo +RUN git checkout ${COMMIT_SHA} COPY docker_build.sh /workspace/repo/docker_build.sh RUN chmod +x /workspace/repo/docker_build.sh diff --git a/src/datasmith/docker/METHOD.md b/src/datasmith/docker/METHOD.md new file mode 100644 index 0000000..b56e9ef --- /dev/null +++ b/src/datasmith/docker/METHOD.md @@ -0,0 +1,24 @@ +# Method to add new building_data. + + +1. Run `python scripts/validate_containers.py` with correct args. If it fails, it will print an exception message with building commands for the containers that need to be built. + +2. Run each one individually, e.g.: + ```bash + $ docker build -t asv-scikit-learn-scikit-learn-bbdb2eff9b877c0ae00ed9854099b92119504f62 src/datasmith/docker/ --build-arg REPO_URL=https://www.github.com/scikit-learn/scikit-learn --build-arg COMMIT_SHA=bbdb2eff9b877c0ae00ed9854099b92119504f62 + ``` +3. Change the `docker_build.sh` accordingly. + +4. Add the new docker_build.sh to the context registry. 
e.g.: + ```python + CONTEXT_REGISTRY.register( + "asv-scikit-learn-scikit-learn", + DockerContext( + building_data="""#!/usr/bin/env bash + (The rest of the modified docker_build.sh script is omitted for brevity) + """.strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), + ) + ``` diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index 01e8d09..c0e29c1 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -1,7 +1,6 @@ from __future__ import annotations import io -import sys import tarfile from pathlib import Path @@ -85,6 +84,7 @@ def build_container( else: logger.info("Docker image '%s' found locally.", image_name) except ImageNotFound: + logger.info("Docker image '%s' not found locally. Building new image.", image_name) pass # Image doesn't exist or was removed, proceed to build if not image_exists: @@ -98,10 +98,70 @@ def build_container( tag=image_name, buildargs=build_args, ) - except DockerException as exc: - sys.exit(f"Failed to build image {image_name}: {exc}") + except DockerException: + logger.exception("Failed to build Docker image '%s'", image_name) else: - raise RuntimeError(f"Docker image '{image_name}' not found and no REPO_URL provided for build.") # noqa: TRY003 + raise RuntimeError(f"Docker image '{image_name}' not found and no REPO_URL provided for build.") if not client.images.get(image_name): - raise RuntimeError(f"Image '{image_name}' failed to build and is not found.") # noqa: TRY003 + raise RuntimeError(f"Image '{image_name}' failed to build and is not found.") + + +class ContextRegistry: + """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" + + def __init__(self, registry: dict[str, DockerContext] | None = None, default_context: DockerContext | None = None): + if registry is None: + registry = {} + self.registry = registry + + if "default" not in 
self.registry: + if default_context is None: + default_context = DockerContext() + self.registry["default"] = default_context + logger.debug("Default Docker context initialized.") + + def register(self, key: str, context: DockerContext) -> None: + """Register a new Docker context.""" + if key in self.registry: + logger.warning(f"Context '{key}' is already registered, overwriting.") + self.registry[key] = context + logger.debug(f"Registered Docker context: {key}") + + def get(self, key: str) -> DockerContext: + """ + Retrieve a Docker context by key using hierarchical matching. + "asv-astropy-astropy-14134" should query these queries in-order: + "asv-astropy-astropy-14134" + "asv-astropy-astropy" + """ + # Build candidate keys in the required order, deduplicated while preserving order. + candidates = [key] + + if "-" in key: + # e.g., "asv-owner-repo-sha" -> "asv-owner-repo" + owner_repo_key = key.rsplit("-", 1)[0] + candidates.append(owner_repo_key) + + # Preserve order but remove duplicates + seen = set() + ordered_candidates = [] + for c in candidates: + if c not in seen: + ordered_candidates.append(c) + seen.add(c) + + # Try each candidate in order + for candidate in ordered_candidates: + if candidate in self.registry: + if candidate == key: + logger.debug(f"Found exact context for key '{key}'.") + else: + logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") + return self.registry[candidate] + + logger.info(f"No context found for key '{key}'. 
Using default context.") + return self.registry["default"] + + def __getitem__(self, key: str) -> DockerContext: + return self.get(key) diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index 03a1c65..ee038a7 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -1,77 +1,17 @@ from __future__ import annotations -from datasmith.docker.context import DockerContext +from datasmith.docker.context import ContextRegistry, DockerContext from datasmith.logging_config import get_logger logger = get_logger("docker.context_registry") - -class ContextRegistry: - """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" - - def __init__(self, registry: dict[str, DockerContext] | None = None, default_context: DockerContext | None = None): - if registry is None: - registry = {} - self.registry = registry - - if "default" not in self.registry: - if default_context is None: - default_context = DockerContext() - self.registry["default"] = default_context - logger.debug("Default Docker context initialized.") - - def register(self, key: str, context: DockerContext) -> None: - """Register a new Docker context.""" - if key in self.registry: - logger.warning(f"Context '{key}' is already registered, overwriting.") - self.registry[key] = context - logger.debug(f"Registered Docker context: {key}") - - def get(self, key: str) -> DockerContext: - """ - Retrieve a Docker context by key using hierarchical matching. - "asv-astropy-astropy-14134" should query these queries in-order: - "asv-astropy-astropy-14134" - "asv-astropy-astropy" - """ - # Build candidate keys in the required order, deduplicated while preserving order. 
- candidates = [key] - - if "-" in key: - # e.g., "asv-owner-repo-sha" -> "asv-owner-repo" - owner_repo_key = key.rsplit("-", 1)[0] - candidates.append(owner_repo_key) - - # Preserve order but remove duplicates - seen = set() - ordered_candidates = [] - for c in candidates: - if c not in seen: - ordered_candidates.append(c) - seen.add(c) - - # Try each candidate in order - for candidate in ordered_candidates: - if candidate in self.registry: - if candidate == key: - logger.debug(f"Found exact context for key '{key}'.") - else: - logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") - return self.registry[candidate] - - logger.info(f"No context found for key '{key}'. Using default context.") - return self.registry["default"] - - def __getitem__(self, key: str) -> DockerContext: - return self.get(key) - - CONTEXT_REGISTRY = ContextRegistry(default_context=DockerContext()) CONTEXT_REGISTRY.register( "asv-astropy-astropy", DockerContext( building_data="""#!/usr/bin/env bash + cd_asv_json_dir() { local match match=$(find . -type f -name "asv.*.json" | head -n 1) @@ -109,7 +49,6 @@ def __getitem__(self, key: str) -> DockerContext: " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME micromamba run -n "asv_${version}" pip install -e . scipy matplotlib done """.strip(), @@ -123,6 +62,7 @@ def __getitem__(self, key: str) -> DockerContext: "asv-scikit-learn-scikit-learn", DockerContext( building_data="""#!/usr/bin/env bash + cd_asv_json_dir() { local match match=$(find . 
-type f -name "asv.*.json" | head -n 1) @@ -165,7 +105,6 @@ def __getitem__(self, key: str) -> DockerContext: " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME micromamba run -n "asv_${version}" pip install meson-python cython micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} done @@ -174,3 +113,174 @@ def __getitem__(self, key: str) -> DockerContext: entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, ), ) + + +CONTEXT_REGISTRY.register( + "asv-nvidia-warp", + DockerContext( + building_data=""" +#!/usr/bin/env bash + +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 + +# only run the below if condition if bvh.cpp is present +grep -q '^#include ' "${ROOT_PATH}/warp/native/bvh.cpp" || \ + sed -i 's|#include |#include \n#include |' "${ROOT_PATH}/warp/native/bvh.cpp" + +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" pip install meson-python cython + micromamba run -n "asv_${version}" python "${ROOT_PATH}/build_lib.py" + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) + + +CONTEXT_REGISTRY.register( + "asv-python-control-python-control", + DockerContext( + building_data=""" +#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . 
-type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" pip install meson-python cython + # if make_version exists run it + if [[ -f "${ROOT_PATH}/make_version.py" ]]; then + micromamba run -n "asv_${version}" python "${ROOT_PATH}/make_version.py" + fi + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) + + +CONTEXT_REGISTRY.register( + "asv-mdanalysis-mdanalysis", + DockerContext( + building_data=""" +#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy "cython<3" joblib threadpoolctl pytest compilers meson-python + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba activate "asv_${version}" + working_dir=$(pwd) + cd "$ROOT_PATH" || exit 1 + bash maintainer/install_all.sh develop + cd "$working_dir" || exit 1 +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) diff --git a/src/datasmith/docker/docker_build.sh b/src/datasmith/docker/docker_build.sh index 3a72116..d093a36 100644 --- a/src/datasmith/docker/docker_build.sh +++ b/src/datasmith/docker/docker_build.sh @@ -14,11 +14,6 @@ cd_asv_json_dir() { eval "$(micromamba shell hook --shell=bash)" micromamba activate base -apt-get update && \ - apt-get install -y \ - ninja-build \ - cmake - ROOT_PATH=${PWD} cd_asv_json_dir || exit 1 CONF_NAME=$(basename "$(find . 
-type f -name "asv.*.json" | head -n 1)") diff --git a/src/datasmith/docker/entrypoint.sh b/src/datasmith/docker/entrypoint.sh index 31fb03b..e8a519d 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -17,6 +17,10 @@ cd_asv_json_dir() { } eval "$(micromamba shell hook --shell=bash)" + +pip install "cython<3" +bash maintainer/install_all.sh develop + micromamba activate base ROOT_PATH=${PWD} cd_asv_json_dir || exit 1 @@ -44,7 +48,6 @@ config.html_dir = str(path / 'html') asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS} micromamba run -n "asv_${version}" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME done diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 32c7dca..2462c19 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -1,12 +1,16 @@ from __future__ import annotations import asyncio +import io +import os import sys +import tarfile from collections.abc import Sequence from pathlib import Path import docker from docker.errors import DockerException, ImageNotFound +from docker.models.containers import Container from datasmith.docker.context_registry import CONTEXT_REGISTRY from datasmith.logging_config import get_logger @@ -69,7 +73,7 @@ async def run_container( asv_args: str, machine_args: dict[str, str], output_dir: Path, -) -> int: +) -> tuple[int, dict[str, str]]: """ Launch one container pinned to *cores* (a cpuset string like ``"4,5,6,7"`` or an iterable of ints) and wait for it to finish. 
@@ -82,14 +86,14 @@ async def run_container( num_cores = len(cpuset.split(",")) sha = image.split(":")[0].split("-")[-1] # Extract the commit SHA from the image name if "machine" not in machine_args: - raise ValueError("machine_args must contain a 'machine' key") # noqa: TRY003 + raise ValueError("machine_args must contain a 'machine' key") machine_args["machine"] = sha env = { - "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores} --set-commit-hash {sha} --machine {sha}", + "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores} --set-commit-hash={sha} --machine={sha}", "ASV_MACHINE_ARGS": " ".join([f"--{k} '{v}'" for k, v in machine_args.items()]), } - def _launch() -> int: + def _launch() -> tuple[int, dict[str, str]]: container_name = f"{image.split(':')[0]}-{idx:03d}" logger.debug("docker run name=%s cpuset=%s env=%s", container_name, cpuset, env) @@ -107,7 +111,6 @@ def _launch() -> int: container = client.containers.run( image, detach=True, - remove=True, name=container_name, environment=env, cpuset_cpus=cpuset, @@ -124,12 +127,59 @@ def _launch() -> int: logger.info("Container %s started, waiting for it to finish...", container_name) result = container.wait() # blocks until exit logger.info("Container result: %s", result) - return result.get("StatusCode", 1) + + # get the contents of all files in the /output folder and return dictionary. 
+ files = log_container_output(container, archive="/output") + + # remove container + container.remove(force=True) + return result.get("StatusCode", 1), files # Keep the event loop responsive return await asyncio.to_thread(_launch) +def log_container_output(container: Container, archive: str = "/output") -> dict[str, str]: + stream, stat = container.get_archive(archive) + # 3) Load tar stream into memory and walk files + buf = io.BytesIO() + for chunk in stream: + buf.write(chunk) + buf.seek(0) + + files_by_abs_path = {} + + with tarfile.open(fileobj=buf, mode="r:*") as tar: + base = archive # basename of "/output" + for member in tar.getmembers(): + if not member.isfile(): + continue + + # Normalize member path to an absolute container path under /output + name = member.name.lstrip("./") + if name.startswith(base + "/"): + rel = name[len(base) + 1 :] # strip leading "output/" + elif name == base: + continue # it's the directory entry itself + else: + # fallback: treat member.name as already relative to /output + rel = name.lstrip("/") + + abs_path = os.path.join(archive, rel) + + fobj = tar.extractfile(member) + if not fobj: + continue + data = fobj.read() + + # Store text as str when possible, otherwise bytes + try: + files_by_abs_path[abs_path] = data.decode("utf-8") + except UnicodeDecodeError: + files_by_abs_path[abs_path] = str(data) + return files_by_abs_path + + async def orchestrate( docker_image_names: Sequence[str], asv_args: str, @@ -138,7 +188,7 @@ async def orchestrate( n_cores: int, output_dir: Path, client: docker.DockerClient, -) -> None: +) -> dict[str, dict[str, str]]: """ Schedule all pairs while ensuring that each container receives `n_cores` dedicated, non-overlapping CPU cores. 
@@ -152,13 +202,13 @@ async def orchestrate( for s in core_sets: core_pool.put_nowait(s) - async def worker(idx: int, image: str) -> int: + async def worker(idx: int, image: str) -> tuple[int, dict[str, str]]: core_set = await core_pool.get() # blocks until a free set exists cpuset_str = ",".join(map(str, core_set)) # "0,1,2,3" logger.info("▶︎ cores=%s image=%s", cpuset_str, image) try: - rc = await run_container( + rc, files = await run_container( client=client, idx=idx, cores=cpuset_str, @@ -167,9 +217,9 @@ async def worker(idx: int, image: str) -> int: machine_args=machine_args, output_dir=output_dir, ) - status = "OK" if rc == 0 else f"FAIL({rc})" + status, files = ("OK", files) if rc == 0 else (f"FAIL({rc})", {}) logger.info("■ cores=%s → %s", cpuset_str, status) - return rc + return (rc, files) finally: # Always release the core set, even on failure core_pool.put_nowait(core_set) @@ -177,7 +227,9 @@ async def worker(idx: int, image: str) -> int: tasks = [asyncio.create_task(worker(i, img)) for i, img in enumerate(docker_image_names)] results = await asyncio.gather(*tasks) - failures = sum(rc != 0 for rc in results) + status_codes, files_by_image = zip(*results) + failures = sum(rc != 0 for rc in status_codes) if failures: sys.exit(f"{failures} container(s) failed") logger.info("All benchmarks finished") + return dict(zip(docker_image_names, files_by_image)) diff --git a/src/datasmith/execution/utils.py b/src/datasmith/execution/utils.py index 16530f7..8a37ddc 100644 --- a/src/datasmith/execution/utils.py +++ b/src/datasmith/execution/utils.py @@ -1,7 +1,7 @@ import re from typing import Any -from git import BadName, GitCommandError, Repo +from git import BadName, Commit, GitCommandError, Repo from requests.exceptions import HTTPError from datasmith.logging_config import get_logger @@ -136,7 +136,7 @@ def _get_commit_info(repo_name: str, commit_sha: str) -> dict: } if commit_sha != commit_info["sha"]: - raise ValueError("Commit SHA mismatch") # noqa: TRY003 + 
raise ValueError("Commit SHA mismatch") return { "sha": commit_info["sha"], "date": commit_info["commit"]["committer"]["date"], @@ -148,6 +148,10 @@ def _get_commit_info(repo_name: str, commit_sha: str) -> dict: } +def has_asv(repo: Repo, c: Commit) -> bool: + return any(obj.type == "blob" and obj.name == "asv.conf.json" for obj in c.tree.traverse()) # type: ignore[union-attr] + + @cache_completion(CACHE_LOCATION, "get_commit_info_offline") def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> dict[str, Any]: """ @@ -158,6 +162,17 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> When we later call `commit.stats`, Git will lazily grab just the blobs needed dto compute line-level stats - still far cheaper than an API call. """ + default_bad = { + "sha": commit_sha, + "date": None, + "message": None, + "total_additions": 0, + "total_deletions": 0, + "total_files_changed": 0, + "files_changed": "", + "patch": "", + "has_asv": False, + } try: commit = repo.commit(commit_sha) except (BadName, ValueError): @@ -166,16 +181,7 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> commit = repo.commit(commit_sha) # retry after fetching except GitCommandError: logger.exception("Error fetching commit info: %s", commit_sha) - return { - "sha": commit_sha, - "date": None, - "message": None, - "total_additions": 0, - "total_deletions": 0, - "total_files_changed": 0, - "files_changed": "", - "patch": "", - } + return default_bad stats = commit.stats @@ -195,6 +201,7 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> "total_files_changed": stats.total["files"], "files_changed": "\n".join(str(k) for k in stats.files), "patch": patch, + "has_asv": has_asv(repo, commit), } @@ -206,10 +213,10 @@ def find_file_in_tree(repo: str, filename: str, branch: str | None = None) -> li if len(repo_info) == 1: repo_info = repo_info[0] # pyright: ignore[reportArgumentType] else: - 
raise ValueError(f"Expected one repo info object, got {len(repo_info)}") # noqa: TRY003 + raise ValueError(f"Expected one repo info object, got {len(repo_info)}") branch = repo_info.get("default_branch") # pyright: ignore[reportOptionalMemberAccess] if not branch: - raise ValueError("Could not determine the default branch for this repository") # noqa: TRY003 + raise ValueError("Could not determine the default branch for this repository") r = _get_github_metadata(endpoint=f"/repos/{repo}/git/refs/heads/{branch}") if isinstance(r, list): diff --git a/src/datasmith/scrape/code_coverage.py b/src/datasmith/scrape/code_coverage.py index ba7039c..04768b2 100644 --- a/src/datasmith/scrape/code_coverage.py +++ b/src/datasmith/scrape/code_coverage.py @@ -75,7 +75,7 @@ def generate_coverage_dataframe( if base == "#" and (commit_urls is not None) and (index_data["project_url"] in commit_urls): base = commit_urls[index_data["project_url"]] elif base == "#": - raise ValueError( # noqa: TRY003 + raise ValueError( f"Base URL '{base}' is not set and {index_data['project_url']} is not in commit_urls. Please provide a valid base URL." 
) diff --git a/src/datasmith/scrape/filter_dashboards.py b/src/datasmith/scrape/filter_dashboards.py index 78a2074..d6e3423 100644 --- a/src/datasmith/scrape/filter_dashboards.py +++ b/src/datasmith/scrape/filter_dashboards.py @@ -109,7 +109,7 @@ def filter_dashboards(df: pd.DataFrame, url_col: str = "repo_name", *, show_prog """ enriched = enrich_repos(df, url_col=url_col, show_progress=show_progress) if not len(enriched): - raise ValueError("Dataframe empty") # noqa: TRY003 + raise ValueError("Dataframe empty") return ( enriched[ (enriched["is_accessible"].fillna(False)) diff --git a/src/datasmith/scrape/utils.py b/src/datasmith/scrape/utils.py index 29b0eb6..6ebd635 100644 --- a/src/datasmith/scrape/utils.py +++ b/src/datasmith/scrape/utils.py @@ -65,21 +65,21 @@ def _parse_commit_url(url: str) -> tuple[str, str, str]: parsed = urlparse(url.strip()) if parsed.scheme not in {"http", "https"}: - raise ValueError(f"Unsupported URL scheme: {parsed.scheme!r}") # noqa: TRY003 + raise ValueError(f"Unsupported URL scheme: {parsed.scheme!r}") if parsed.hostname not in {"github.com", "www.github.com"}: - raise ValueError(f"Not a GitHub URL: {url!r}") # noqa: TRY003 + raise ValueError(f"Not a GitHub URL: {url!r}") path = unquote(parsed.path) parts = [p for p in PurePosixPath(path).parts if p != "/"] if len(parts) < 4 or parts[2] != "commit": - raise ValueError(f"Not a GitHub commit URL: {url!r}") # noqa: TRY003 + raise ValueError(f"Not a GitHub commit URL: {url!r}") owner, repo, sha = parts[0], parts[1], parts[3] if not _HEX.fullmatch(sha): - raise ValueError(f"Invalid commit SHA: {sha!r}") # noqa: TRY003 + raise ValueError(f"Invalid commit SHA: {sha!r}") return owner, repo, sha.lower() diff --git a/src/datasmith/utils.py b/src/datasmith/utils.py index fdc99c7..900eeee 100644 --- a/src/datasmith/utils.py +++ b/src/datasmith/utils.py @@ -60,7 +60,7 @@ def _build_codecov_headers() -> dict[str, str]: def _build_headers(name: str) -> dict[str, str]: if name not in 
configured_headers: - raise ValueError(f"Unknown header type: {name}. Available types: {', '.join(configured_headers.keys())}") # noqa: TRY003 + raise ValueError(f"Unknown header type: {name}. Available types: {', '.join(configured_headers.keys())}") return configured_headers[name]() @@ -76,7 +76,7 @@ def cache_completion(db_loc: str, table_name: str = "cache"): """ # Validate table_name to avoid SQL-injection risks if not re.match(r"^\w+$", table_name): - raise ValueError("table_name must be alphanumeric/underscore only") # noqa: TRY003 + raise ValueError("table_name must be alphanumeric/underscore only") def decorator(func): @functools.wraps(func) @@ -235,7 +235,7 @@ def prepare_url(base_url: str, params: dict[str, str] | None = None) -> str: r = requests.Request("GET", base_url, params=params) prepared = r.prepare() if prepared.url is None: - raise ValueError(f"Invalid URL: {base_url} with params {params}") # noqa: TRY003 + raise ValueError(f"Invalid URL: {base_url} with params {params}") return prepared.url From 39cfdbcd12a206f3aea9acd3cad592677d160863 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 23 Aug 2025 03:10:30 +0000 Subject: [PATCH 09/20] partial auto-installer working --- Makefile | 26 +- README.md | 10 +- pyproject.toml | 8 +- scratch/scripts/benchmark_commits.py | 2 +- .../scripts/parallel_validate_containers.py | 150 +++++ scratch/scripts/synthesize_contexts.py | 178 ++++++ scratch/scripts/validate_containers.py | 12 +- src/datasmith/agents/config.py | 16 +- src/datasmith/agents/context_synthesis.py | 553 ++++++++++++++++++ src/datasmith/docker/context.py | 213 ++++++- src/datasmith/docker/context_registry.py | 118 +++- src/datasmith/docker/validation.py | 222 +++++++ 12 files changed, 1475 insertions(+), 33 deletions(-) create mode 100644 scratch/scripts/parallel_validate_containers.py create mode 100644 scratch/scripts/synthesize_contexts.py create mode 100644 src/datasmith/agents/context_synthesis.py create mode 100644 
src/datasmith/docker/validation.py diff --git a/Makefile b/Makefile index 66a0e46..a3f31e7 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,12 @@ .PHONY: install install: ## Install the virtual environment and install the pre-commit hooks - @echo "🚀 Creating virtual environment using uv" + @echo "Creating virtual environment using uv" @uv sync @uv run pre-commit install .PHONY: backup backup: ## Create a backup of the datasets, results, and analysis directories - @echo "🚀 Creating backup archive" + @echo "Creating backup archive" @/usr/bin/env bash -euo pipefail -c '\ if [ ! -f tokens.env ]; then \ echo "❌ Error: tokens.env file not found"; exit 1; \ @@ -22,33 +22,41 @@ backup: ## Create a backup of the datasets, results, and analysis directories .PHONY: check check: ## Run code quality tools. - @echo "🚀 Checking lock file consistency with 'pyproject.toml'" + @echo "Checking lock file consistency with 'pyproject.toml'" @uv lock --locked - @echo "🚀 Linting code: Running pre-commit" + @echo "Linting code: Running pre-commit" @uv run pre-commit run -a - @echo "🚀 Static type checking: Running mypy" + @echo "Static type checking: Running mypy" @uv run mypy - @echo "🚀 Checking for obsolete dependencies: Running deptry" + @echo "Checking for obsolete dependencies: Running deptry" @uv run deptry src .PHONY: test test: ## Test the code with pytest - @echo "🚀 Testing code: Running pytest" + @echo "Testing code: Running pytest" @uv run python -m pytest --cov --cov-config=pyproject.toml --cov-report=xml .PHONY: build build: clean-build ## Build wheel file - @echo "🚀 Creating wheel file" + @echo "Creating wheel file" @uvx --from build pyproject-build --installer uv .PHONY: clean-build clean-build: ## Clean build artifacts - @echo "🚀 Removing build artifacts" + @echo "Removing build artifacts" @uv run python -c "import shutil; import os; shutil.rmtree('dist') if os.path.exists('dist') else None" +.PHONY: docker-clean +docker-clean: ## Clean up dangling Docker images and 
containers + @echo "Cleaning up dangling Docker images and containers" + @docker system prune -f + + .PHONY: help help: @uv run python -c "import re; \ [[print(f'\033[36m{m[0]:<20}\033[0m {m[1]}') for m in re.findall(r'^([a-zA-Z_-]+):.*?## (.*)$$', open(makefile).read(), re.M)] for makefile in ('$(MAKEFILE_LIST)').strip().split()]" + + .DEFAULT_GOAL := help diff --git a/README.md b/README.md index e9663ab..9c736dd 100644 --- a/README.md +++ b/README.md @@ -160,13 +160,13 @@ Given the list of repositories, we find the subset of commits that have already ```bash $ python scratch/scripts/collect_commits.py \ - --dashboards scratch/artifacts/raw/repos_valid_sm.csv \ - --outfile scratch/artifacts/raw/commits_all_sm.jsonl \ + --dashboards scratch/artifacts/raw/repos_valid.csv \ + --outfile scratch/artifacts/raw/commits_all.jsonl \ --max-pages 50 $ python scratch/scripts/filter_commits.py \ - --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid_sm.csv \ - --merged-commits-pth scratch/artifacts/raw/commits_all_sm.jsonl \ - --output-pth scratch/artifacts/raw/commits_filtered_sm.jsonl \ + --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid.csv \ + --merged-commits-pth scratch/artifacts/raw/commits_all.jsonl \ + --output-pth scratch/artifacts/raw/commits_filtered.jsonl \ --max-repos 350 \ --threads 8 \ --procs 8 diff --git a/pyproject.toml b/pyproject.toml index 719867f..0a36f74 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -142,8 +142,12 @@ module = "dspy.*" ignore_missing_imports = true [[tool.mypy.overrides]] -module = "datasmith.agents.perf_judge" -disable_error_code = ["no-any-unimported"] +module = "asv.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "datasmith.agents.*" +disable_error_code = ["no-any-unimported", "import-untyped"] [[tool.mypy.overrides]] module = "datasmith.execution.utils" diff --git a/scratch/scripts/benchmark_commits.py b/scratch/scripts/benchmark_commits.py index 5594575..bc24a75 100644 --- 
a/scratch/scripts/benchmark_commits.py +++ b/scratch/scripts/benchmark_commits.py @@ -79,7 +79,7 @@ def process_commits(commits_pth: Path) -> list[tuple[str, str, str]]: repo_name = row["repo_name"] sha = row["commit_sha"] has_asv = row.get("has_asv", True) - if not has_asv and "scikit-learn" not in repo_name: + if not has_asv: logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") continue owner, repo = repo_name.split("/") diff --git a/scratch/scripts/parallel_validate_containers.py b/scratch/scripts/parallel_validate_containers.py new file mode 100644 index 0000000..5ce4ebe --- /dev/null +++ b/scratch/scripts/parallel_validate_containers.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +import argparse +import json +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path + +import asv +import pandas as pd + +from datasmith.benchmark.collection import BenchmarkCollection +from datasmith.docker.orchestrator import get_docker_client +from datasmith.docker.validation import Task, _err_lock, validate_one +from datasmith.logging_config import configure_logging +from datasmith.scrape.utils import _parse_commit_url + +logger = configure_logging() +# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w")) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + prog="validate_containers", + description="Validate that each benchmark container can be compiled and run with ASV.", + ) + parser.add_argument( + "--dashboard", + type=Path, + help="Path to the dashboard containing the benchmarks. Either --dashboard or --commits must be provided.", + ) + parser.add_argument( + "--commits", + type=Path, + help="Path to a JSONL file containing commit information. 
Either --dashboard or --commits must be provided.", + ) + parser.add_argument( + "--docker-dir", + type=Path, + default=Path("src/datasmith/docker"), + help="Directory containing the Dockerfile and other necessary files for building the ASV image.", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=Path("output"), + help="Directory where the results will be stored.", + ) + parser.add_argument("--max-workers", type=int, default=8, help="Max parallel builds/runs.") + parser.add_argument("--build-timeout", type=int, default=20 * 60, help="Seconds before aborting a docker build.") + parser.add_argument("--run-timeout", type=int, default=15 * 60, help="Seconds before aborting asv run.") + parser.add_argument("--tail-chars", type=int, default=4000, help="Chars of log tail to include in failure report.") + parser.add_argument( + "--limit-per-repo", type=int, default=5, help="Cap SHAs per repo (keeps your small-scale test). -1 = no limit." + ) + return parser.parse_args() + + +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: + if args.dashboard: + dashboard = BenchmarkCollection.load(args.dashboard) + all_states = {} + for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {sha} + else: + all_states[(owner, repo)].add(sha) + elif args.commits: + commits = pd.read_json(args.commits, lines=True) + all_states = {} + for _, row in commits.iterrows(): + repo_name = row["repo_name"] + sha = row["commit_sha"] + has_asv = row.get("has_asv", True) + if not has_asv: + logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + continue + owner, repo = repo_name.split("/") + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {(sha)} + else: + all_states[(owner, repo)].add(sha) + else: + raise ValueError("Either --dashboard or --commits must be provided.") + return all_states + + +# === main 
(parallel) === +def main(args: argparse.Namespace) -> None: + client = get_docker_client() + all_states = process_inputs(args) + + # Prepare tasks + tasks: list[Task] = [] + for (owner, repo), uniq in all_states.items(): + limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) + for sha in limited: + tasks.append(Task(owner, repo, sha)) + + (args.output_dir / "results").mkdir(parents=True, exist_ok=True) + # reset outputs + (args.output_dir / "errors.txt").unlink(missing_ok=True) + (args.output_dir / "failures.jsonl").unlink(missing_ok=True) + + machine_defaults: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] + machine_defaults = { + k: str(v.replace(" ", "_").replace("'", "").replace('"', "")) for k, v in machine_defaults.items() + } + + logger.info("Starting parallel validation of %d tasks with %d workers", len(tasks), args.max_workers) + results: list[dict] = [] + + with ThreadPoolExecutor(max_workers=args.max_workers) as ex: + futures = [ex.submit(validate_one, t, args, client, machine_defaults) for t in tasks] + for fut in as_completed(futures): + rec = fut.result() + results.append(rec) + with _err_lock, open(args.output_dir / "failures.jsonl", "a") as jf: + jf.write(json.dumps(rec) + "\n") + + # Rollup (minimal, quick to read) + rollup = { + r["image_name"]: { + "owner": r["owner"], + "repo": r["repo"], + "sha": r["sha"], + "stage": r["stage"], + "ok": r["ok"], + "rc": r["rc"], + "cmd_build": r["cmd_build"], + "cmd_run": r["cmd_run"], + "files": r.get("files", []), + } + for r in results + } + with open(args.output_dir / "all_files_by_image.json", "w") as f: + json.dump(rollup, f, indent=2) + + failed = [r for r in results if not r["ok"]] + if failed: + print("\n=== FAILURES ===") + for r in failed: + print(f"{r['image_name']}: rc={r['rc']} stage={r['stage']}") + print(f"\nDetails: {args.output_dir / 'errors.txt'}") + else: + print("All containers validated 
successfully.")
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    main(args)
diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py
new file mode 100644
index 0000000..873d735
--- /dev/null
+++ b/scratch/scripts/synthesize_contexts.py
@@ -0,0 +1,178 @@
+from __future__ import annotations
+
+import argparse
+import json
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from pathlib import Path
+
+import asv
+import pandas as pd
+
+from datasmith.agents.context_synthesis import agent_build_and_validate
+from datasmith.benchmark.collection import BenchmarkCollection
+from datasmith.docker.context_registry import CONTEXT_REGISTRY
+from datasmith.docker.orchestrator import get_docker_client
+from datasmith.docker.validation import Task, _err_lock
+from datasmith.logging_config import configure_logging
+from datasmith.scrape.utils import _parse_commit_url
+
+logger = configure_logging()
+# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w"))
+
+
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(
+        prog="synthesize_contexts",
+        description="Synthesize Docker contexts and validate that each benchmark container can be compiled and run with ASV.",
+    )
+    parser.add_argument(
+        "--dashboard",
+        type=Path,
+        help="Path to the dashboard containing the benchmarks. Either --dashboard or --commits must be provided.",
+    )
+    parser.add_argument(
+        "--commits",
+        type=Path,
+        help="Path to a JSONL file containing commit information. 
Either --dashboard or --commits must be provided.", + ) + parser.add_argument( + "--docker-dir", + type=Path, + default=Path("src/datasmith/docker"), + help="Directory containing the Dockerfile and other necessary files for building the ASV image.", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=Path("output"), + help="Directory where the results will be stored.", + ) + parser.add_argument("--max-workers", type=int, default=8, help="Max parallel builds/runs.") + parser.add_argument("--max-attempts", type=int, default=3, help="Max attempts per task (build+run).") + parser.add_argument("--build-timeout", type=int, default=20 * 60, help="Seconds before aborting a docker build.") + parser.add_argument("--run-timeout", type=int, default=15 * 60, help="Seconds before aborting asv run.") + parser.add_argument("--tail-chars", type=int, default=4000, help="Chars of log tail to include in failure report.") + parser.add_argument( + "--limit-per-repo", type=int, default=5, help="Cap SHAs per repo (keeps your small-scale test). -1 = no limit." 
+ ) + return parser.parse_args() + + +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: + if args.dashboard: + dashboard = BenchmarkCollection.load(args.dashboard) + all_states = {} + for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {sha} + else: + all_states[(owner, repo)].add(sha) + elif args.commits: + commits = pd.read_json(args.commits, lines=True) + all_states = {} + for _, row in commits.iterrows(): + repo_name = row["repo_name"] + sha = row["commit_sha"] + has_asv = row.get("has_asv", True) + if not has_asv: + logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + continue + owner, repo = repo_name.split("/") + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {(sha)} + else: + all_states[(owner, repo)].add(sha) + else: + raise ValueError("Either --dashboard or --commits must be provided.") + return all_states + + +def main(args: argparse.Namespace) -> None: + client = get_docker_client() + all_states = process_inputs(args) + + # Prepare tasks + tasks: list[Task] = [] + for (owner, repo), uniq in all_states.items(): + limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) + for sha in limited: + tasks.append(Task(owner, repo, sha)) + + (args.output_dir / "results").mkdir(parents=True, exist_ok=True) + # reset outputs + (args.output_dir / "errors.txt").unlink(missing_ok=True) + (args.output_dir / "results.jsonl").unlink(missing_ok=True) + + machine_defaults: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] + machine_defaults = { + k: str(v.replace(" ", "_").replace("'", "").replace('"', "")) for k, v in machine_defaults.items() + } + logger.debug("main: machine_defaults keys=%d", len(machine_defaults)) + + results: list[dict] = [] + if args.max_workers < 1: + for t in tasks: + res = 
agent_build_and_validate( + task=t, args=args, client=client, machine_defaults=machine_defaults, max_attempts=args.max_attempts + ) + results.append(res) + with _err_lock, open(args.output_dir / "results.jsonl", "a") as jf: + jf.write(json.dumps(res) + "\n") + else: + with ThreadPoolExecutor(max_workers=args.max_workers) as ex: + futures = [ + ex.submit( + agent_build_and_validate, + task=t, + args=args, + client=client, + machine_defaults=machine_defaults, + max_attempts=args.max_attempts, + ) + for t in tasks + ] + for fut in as_completed(futures): + res = fut.result() + results.append(res) + with _err_lock, open(args.output_dir / "results.jsonl", "a") as jf: + jf.write(json.dumps(res) + "\n") + + if res["ok"]: + logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) + # Add to CONTEXT_REGISTRY + with CONTEXT_REGISTRY.get_lock(): + CONTEXT_REGISTRY.save_to_file(path=Path("scratch/context_registry.json")) + + # Rollup (minimal, quick to read) + rollup = { + r["image_name"]: { + "owner": r["owner"], + "repo": r["repo"], + "sha": r["sha"], + "stage": r["stage"], + "ok": r["ok"], + "rc": r["rc"], + "duration": r.get("duration_s", None), + "stderr_tail": r.get("stderr_tail", ""), + "stdout_tail": r.get("stdout_tail", ""), + "attempts": r.get("attempts", []), + "files": r.get("files", []), + } + for r in results + } + with open(args.output_dir / "all_files_by_image.json", "w") as f: + json.dump(rollup, f, indent=2) + + failed = [r for r in results if not r["ok"]] + if failed: + print("\n=== FAILURES ===") + for r in failed: + print(f"{r['image_name']}: rc={r['rc']} stage={r['stage']}") + print(f"\nDetails: {args.output_dir / 'errors.txt'}") + else: + print("All containers validated successfully.") + + +if __name__ == "__main__": + args = parse_args() + main(args) diff --git a/scratch/scripts/validate_containers.py b/scratch/scripts/validate_containers.py index b190fc6..e89d9ad 100644 --- a/scratch/scripts/validate_containers.py +++ 
b/scratch/scripts/validate_containers.py @@ -4,7 +4,6 @@ import argparse import json -import logging from pathlib import Path import asv @@ -16,7 +15,8 @@ from datasmith.logging_config import configure_logging from datasmith.scrape.utils import _parse_commit_url -logger = configure_logging(level=logging.DEBUG) +logger = configure_logging() +# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w")) def parse_args() -> argparse.Namespace: @@ -66,7 +66,7 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: repo_name = row["repo_name"] sha = row["commit_sha"] has_asv = row.get("has_asv", True) - if not has_asv and "scikit-learn" not in repo_name: + if not has_asv: logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") continue owner, repo = repo_name.split("/") @@ -74,7 +74,6 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: all_states[(owner, repo)] = {(sha)} else: all_states[(owner, repo)].add(sha) - all_states.pop(("scikit-learn", "scikit-learn")) # already validated. 
else: raise ValueError("Either --dashboard or --commits must be provided.") return all_states @@ -93,8 +92,7 @@ def main(args: argparse.Namespace) -> None: + "\n$ docker run --rm -v $(pwd)/output:/output {image_name} asv run --quick --python=same --set-commit-hash={commit_sha}" ) for (owner, repo), uniq_shas in all_states.items(): - print("SMALL SCALE TESTING", owner, repo, len(uniq_shas), "ONLY 5") - for sha in list(uniq_shas)[:5]: + for sha in list(uniq_shas): image_name = f"asv-{owner}-{repo}-{sha}".lower() docker_ctx = CONTEXT_REGISTRY[image_name] try: @@ -136,12 +134,14 @@ def main(args: argparse.Namespace) -> None: ) ) files = log_container_output(container, archive="/output") + print(f"{image_name} completed failed with status code {result.get('StatusCode', 1)}") else: logger.info(f"Container {image_name} for commit {sha} completed successfully.") files = log_container_output(container, archive="/output") print(f"{image_name} completed successfully") all_files_by_image[image_name] = files except Exception: + print(f"{image_name} for commit {sha} failed to build or run.") logger.exception(f"Error validating {image_name} for commit {sha}") errors.append( error_fmt.format( diff --git a/src/datasmith/agents/config.py b/src/datasmith/agents/config.py index 8eafe18..7c703f5 100644 --- a/src/datasmith/agents/config.py +++ b/src/datasmith/agents/config.py @@ -1,7 +1,7 @@ import logging import os -import dspy # type: ignore[import-untyped] +import dspy logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -10,10 +10,18 @@ def configure_agent_backends() -> None: model = os.getenv("DSPY_MODEL_NAME") backend_url = os.getenv("DSPY_URL") - api_key = os.getenv("DSPY_API_KEY") + if anthropic_api_key := os.getenv("ANTHROPIC_API_KEY"): + api_key = anthropic_api_key + model = os.getenv("ANTHROPIC_MODEL_NAME", "anthropic/claude-3-opus-20240229") + backend_url = None + elif vllm_api_key := os.getenv("DSPY_API_KEY"): + api_key = vllm_api_key + else: + 
logger.warning("NO API KEY SET") + return - if not model or not backend_url or not api_key: - logger.warning("Environment variables for DSPY model, URL, or API key are not set.") + if not model or not api_key: + logger.warning("Environment variables for DSPY model or API key are not set.") return lm = dspy.LM(model=model, api_base=backend_url, api_key=api_key, model_type="chat") diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py new file mode 100644 index 0000000..639e160 --- /dev/null +++ b/src/datasmith/agents/context_synthesis.py @@ -0,0 +1,553 @@ +# auto_builder.py +from __future__ import annotations + +import argparse +import logging +import pickle +import re +import sys +from dataclasses import dataclass +from pathlib import Path + +import docker +import dspy + +from datasmith.agents.config import configure_agent_backends +from datasmith.docker.context import BuildResult, DockerContext +from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.validation import Task, validate_one + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +handler = logging.StreamHandler(stream=sys.stdout) +handler.setFormatter(logging.Formatter("[%(levelname)s] %(message)s")) +logger.addHandler(handler) + +configure_agent_backends() + + +def _preview(s: str, n: int = 160) -> str: + s = s or "" + s = s.replace("\n", "\\n") + return s[:n] + ("..." if len(s) > n else "") + + +# -------------------------------------------------------------------------------------- +# Heuristic helpers: we extract hints from stderr/stdout and propose concrete patchlets. 
+# -------------------------------------------------------------------------------------- + + +@dataclass +class Patchlet: + name: str + snippet: str + rationale: str + + +COMMON_PATCHLETS: list[tuple[re.Pattern, Patchlet]] = [ + # PEP 517 / wheel build failures -> try modern build tools + (maybe) disable isolation + ( + re.compile(r"could not build wheels|PEP 517|pyproject\.toml build backend", re.I), + Patchlet( + "modern-build-stack", + 'micromamba run -n "asv_${version}" python -m pip install -U pip setuptools wheel build "meson-python"\n', + "Package uses pyproject/PEP 517; ensure modern build stack including meson-python.", + ), + ), + # C/C++/Fortran compile errors -> make sure compilers & basic headers exist and numpy cython pins are friendly + ( + re.compile(r"(gcc|clang|fortran|gfortran|fatal error:.*\.h: No such file|undefined reference|linker)", re.I), + Patchlet( + "compilers-cffi", + 'micromamba run -n "asv_${version}" python -m pip install "cython<3" "numpy<2" "scipy<1.14"\n', + "Typical compiled extension builds succeed with conservative numeric pins.", + ), + ), + # Meson detected but not installed + ( + re.compile(r"meson\.build|meson-python|meson\b", re.I), + Patchlet( + "meson-explicit", + 'micromamba run -n "asv_${version}" python -m pip install "meson" "meson-python"\n', + "Explicitly install meson if meson.build appears in logs.", + ), + ), + # Cython needed + ( + re.compile(r"cython (is )?required|\.pyx|pyximport", re.I), + Patchlet( + "cython-needed", + 'micromamba run -n "asv_${version}" python -m pip install "cython<3"\n', + "Project wants Cython at buildtime.", + ), + ), + # Old setuptools + ( + re.compile(r"error: invalid command .*bdist_wheel|setuptools\s*([0-9]+\.){1,2}[0-9]+.*too old", re.I), + Patchlet( + "setuptools-up", + 'micromamba run -n "asv_${version}" python -m pip install -U "setuptools>=60" wheel\n', + "Upgrade setuptools/wheel to unlock newer commands.", + ), + ), + # ResolutionImpossible -> try no-build-isolation 
(project's own constraints) + ( + re.compile(r"ResolutionImpossible|conflict|cannot resolve", re.I), + Patchlet( + "no-build-isolation", + "# Fallback to no-build-isolation to let project manage constraints\n" + 'micromamba run -n "asv_${version}" python -m pip install --no-build-isolation --editable "${ROOT_PATH}"\n', + "Let project control pins if resolver conflicts arise.", + ), + ), +] + + +def derive_patchlets(stderr_tail: str, stdout_tail: str) -> list[Patchlet]: + logger.debug( + "derive_patchlets: analyzing tails (stderr_len=%d, stdout_len=%d)", + len(stderr_tail or ""), + len(stdout_tail or ""), + ) + text = f"{stderr_tail}\n{stdout_tail}" + seen = set() + picks: list[Patchlet] = [] + for pattern, p in COMMON_PATCHLETS: + if pattern.search(text) and p.name not in seen: + seen.add(p.name) + picks.append(p) + logger.debug("derive_patchlets: matched pattern '%s' -> patchlet '%s'", pattern.pattern, p.name) + logger.info("derive_patchlets: selected %d patchlet(s): %s", len(picks), ", ".join([p.name for p in picks]) or "-") + return picks + + +class BuildScriptSynthesis(dspy.Signature): + """Draft a bash script (building_data) to build & install a Python repo inside micromamba envs + discovered via asv.*.json. The script MUST be idempotent and safe to run in Docker. + Respect this template: + - discover and cd into the dir containing asv.*.json + - for each python version listed there: + * create micromamba env "asv_${version}" + * ensure asv + build tooling + * then perform project install (editable or wheel) with best-guess flags + - no user prompts, all non-interactive + """ + + # Inputs + owner = dspy.InputField(desc="GitHub owner/org, e.g. 'scikit-learn'.") + repo = dspy.InputField(desc="Repository name, e.g. 
'scikit-learn'.") + sha = dspy.InputField(desc="Full commit SHA to build.") + log_tail = dspy.InputField(desc="Recent stderr/stdout tail (merged, up to ~8k chars).") + last_building_data = dspy.InputField(desc="Previous building_data script; empty on attempt #1.") + heuristic_notes = dspy.InputField(desc="Bullet list of concrete shell lines/patchlets to consider.") + expected_template = dspy.InputField(desc="Stable outer template; only BUILD STEPS may be customized.") + + # Output + building_data = dspy.OutputField(desc="Final executable bash script with only the BUILD STEPS region customized.") + + +class BuildScriptProgram(dspy.Module): + def __init__(self) -> None: + super().__init__() + self.predict = dspy.Predict(BuildScriptSynthesis) + + def forward( + self, + owner: str, + repo: str, + sha: str, + log_tail: str, + last_building_data: str, + heuristic_notes: str, + expected_template: str, + ) -> str: + logger.info( + "DSPy: synthesizing build script for %s/%s@%s (log_tail_len=%d, has_last=%s)", + owner, + repo, + sha, + len(log_tail or ""), + bool(last_building_data), + ) + logger.debug("DSPy: heuristic notes: %s", _preview(heuristic_notes, 240)) + out = self.predict( + owner=owner, + repo=repo, + sha=sha, + log_tail=log_tail, + last_building_data=last_building_data, + heuristic_notes=heuristic_notes, + expected_template=expected_template, + ) + # Safety belt: ensure the required fixed template anchors are present. 
+ script = out.building_data.strip() # pyright: ignore[reportAttributeAccessIssue] + logger.debug("DSPy: candidate script preview: %s", _preview(script, 240)) + must_haves = ["cd_asv_json_dir()", "micromamba", "for version in $python_versions; do"] + ok_template = all(m in script for m in must_haves) + if not ok_template: + logger.warning("DSPy: template anchors missing; falling back to provided template") + script = expected_template + logger.info("DSPy: finalized script length=%d", len(script)) + assert isinstance(script, str), "type mismatch" # noqa: S101 + return script + + +# -------------------------------------------------------------------------------------- +# Template: stable outer shell. The agent fills the "BUILD STEPS" region only. +# -------------------------------------------------------------------------------------- + +BUILDING_TEMPLATE = """#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba activate "asv_${version}" + # ---------- BUILD STEPS (agent should only modify this region) ---------- + # The agent will append/replace lines here based on logs and heuristics. + # first attempt: editable install with no build isolation for projects with bespoke pins + # Remove this line if you want to start from scratch. 
+ python -m pip install --no-build-isolation --editable "${ROOT_PATH}" + # ---------- END BUILD STEPS ---------- +done +""".strip() + + +# -------------------------------------------------------------------------------------- +# Orchestrator +# -------------------------------------------------------------------------------------- + + +@dataclass +class AttemptRecord: + attempt_idx: int + building_data: str + build_result: BuildResult | None = None + + +def _merge_tail(stderr_tail: str, stdout_tail: str, max_len: int = 8000) -> str: + text = (stderr_tail or "") + "\n" + (stdout_tail or "") + return text[-max_len:] + + +def _save_pickle(ctx: DockerContext, path: Path) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "wb") as f: + pickle.dump(ctx, f, protocol=pickle.HIGHEST_PROTOCOL) + logger.info("Saved DockerContext pickle: %s", path.name) + + +def _context_from_script(building_data: str) -> DockerContext: + # Reuse default Dockerfile & entrypoint from the default context + base = CONTEXT_REGISTRY["default"] + logger.debug( + "_context_from_script: base dockerfile len=%d, entrypoint len=%d", + len(base.dockerfile_data or ""), + len(base.entrypoint_data or ""), + ) + logger.debug("_context_from_script: building_data preview: %s", _preview(building_data, 200)) + return DockerContext( + building_data=building_data, + dockerfile_data=base.dockerfile_data, + entrypoint_data=base.entrypoint_data, + ) + + +def synthesize_script( + program: BuildScriptProgram, + task: Task, + last_script: str, + stderr_tail: str, + stdout_tail: str, +) -> str: + logger.info( + "synthesize_script: task=%s/%s@%s, last_script=%s", + task.owner, + task.repo, + task.sha, + "present" if last_script else "none", + ) + patchlets = derive_patchlets(stderr_tail, stdout_tail) + notes = "" + if patchlets: + notes = "\n".join(f"- {p.rationale}\n {p.snippet.strip()}" for p in patchlets) + merged_log = _merge_tail(stderr_tail, stdout_tail) + logger.debug("synthesize_script: 
merged_log_len=%d, notes_len=%d", len(merged_log), len(notes)) + + script = program( + owner=task.owner, + repo=task.repo, + sha=task.sha, + log_tail=merged_log, + last_building_data=last_script or "", + heuristic_notes=notes or "(no extra hints)", + expected_template=BUILDING_TEMPLATE, + ) + script = str(script) + logger.info("synthesize_script: script length=%d", len(script)) + return script + + +def build_once_with_context( + client: docker.DockerClient, + image_name: str, + context: DockerContext, + repo_url: str, + sha: str, + *, + timeout_s: int, + tail_chars: int, + pull: bool = False, +) -> BuildResult: + logger.info("build_once_with_context: registering context key=%s", image_name) + with CONTEXT_REGISTRY.get_lock(): + CONTEXT_REGISTRY.register(image_name, context) + logger.debug( + "build_once_with_context: build args: REPO_URL=%s, COMMIT_SHA=%s, timeout_s=%s, tail_chars=%s, pull=%s", + repo_url, + sha, + timeout_s, + tail_chars, + pull, + ) + res = context.build_container_streaming( + client=client, + image_name=image_name, + build_args={"REPO_URL": repo_url, "COMMIT_SHA": sha}, + force=True, + timeout_s=timeout_s, + tail_chars=tail_chars, + pull=pull, + ) + logger.info( + "build_once_with_context: result ok=%s rc=%s duration=%.1fs (stderr_tail_len=%d, stdout_tail_len=%d)", + res.ok, + res.rc, + res.duration_s, + len(res.stderr_tail or ""), + len(res.stdout_tail or ""), + ) + logger.debug("build_once_with_context: stderr_tail preview: %s", _preview(res.stderr_tail, 240)) + return res + + +@dataclass +class ArgsLike: + build_timeout: int + run_timeout: int + tail_chars: int + output_dir: Path + + +def agent_build_and_validate( + task: Task, + args: argparse.Namespace, + client: docker.DockerClient, + machine_defaults: dict, + max_attempts: int = 3, +) -> dict: + """ + Main entry: iteratively synthesize build script, build, and validate via your validate_one. + Saves attempt pickles and final pickle on success. 
+ """ + logger.info( + "agent_build_and_validate: start for %s/%s@%s (max_attempts=%d)", task.owner, task.repo, task.sha, max_attempts + ) + program = BuildScriptProgram() + + image_name = f"asv-{task.owner}-{task.repo}-{task.sha}".lower() + repo_url = f"https://www.github.com/{task.owner}/{task.repo}" + logger.debug("agent_build_and_validate: image_name=%s repo_url=%s", image_name, repo_url) + + attempts: list[AttemptRecord] = [] + prior_script = "" # empty on attempt #1 + + # Attempt loop + for i in range(1, max_attempts + 1): + logger.info("agent_build_and_validate: attempt %d/%d", i, max_attempts) + if i == 1: + script = synthesize_script(program, task, prior_script, stderr_tail="", stdout_tail="") + else: + last = attempts[-1].build_result + logger.debug( + "agent_build_and_validate: re-synthesis with last tails (stderr_len=%d, stdout_len=%d)", + len(last.stderr_tail or "") if last else 0, + len(last.stdout_tail or "") if last else 0, + ) + script = synthesize_script( + program, + task, + attempts[-1].building_data, + stderr_tail=(last.stderr_tail if last else ""), + stdout_tail=(last.stdout_tail if last else ""), + ) + + ctx = _context_from_script(script) + # Save attempt pickle + attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" + _save_pickle(ctx, attempt_pickle) + + # Build + logger.info("agent_build_and_validate: building image '%s'", image_name) + build_res = build_once_with_context( + client=client, + image_name=image_name, + context=ctx, + repo_url=repo_url, + sha=task.sha, + timeout_s=args.build_timeout, + tail_chars=args.tail_chars, + ) + attempts.append(AttemptRecord(attempt_idx=i, building_data=script, build_result=build_res)) + + if build_res.ok: + # Save final pickle and then run full validation using your pipeline + final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" + _save_pickle(ctx, final_pickle) + + logger.info("agent_build_and_validate: build succeeded; starting 
validation run") + result = validate_one(task, args, client, machine_defaults) + logger.info( + "agent_build_and_validate: validation stage=%s ok=%s rc=%s", + result.get("stage"), + result.get("ok"), + result.get("rc"), + ) + + result["attempts"] = [ + { + "attempt": a.attempt_idx, + "ok": (a.build_result.ok if a.build_result else False), + "rc": (a.build_result.rc if a.build_result else None), + "stderr_tail": (a.build_result.stderr_tail if a.build_result else ""), + "stdout_tail": (a.build_result.stdout_tail if a.build_result else ""), + } + for a in attempts + ] + result["context_pickle"] = str(final_pickle) + return result + + # otherwise iterate with new logs + logger.warning( + "agent_build_and_validate: attempt %d failed (rc=%s). Iterating if attempts remain.", + i, + (build_res.rc if build_res else "unknown"), + ) + + # All attempts failed + last = attempts[-1].build_result + logger.error("agent_build_and_validate: all attempts failed for %s", image_name) + # merged_tail = _merge_tail(last.stderr_tail if last else "", last.stdout_tail if last else "") + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": "build", + "ok": False, + "rc": (last.rc if last else 1), + "duration_s": (last.duration_s if last else None), + "stderr_tail": (last.stderr_tail if last else ""), + "stdout_tail": (last.stdout_tail if last else ""), + "attempts": [ + { + "attempt": a.attempt_idx, + "ok": (a.build_result.ok if a.build_result else False), + "rc": (a.build_result.rc if a.build_result else None), + } + for a in attempts + ], + "files": [], + } + + +# def main(args: argparse.Namespace) -> None: +# from scratch.scripts.parallel_validate_containers import process_inputs + +# logger.info("main: starting auto_builder with args: %s", args) +# client = get_docker_client() +# logger.info("main: docker client acquired") + +# all_states = process_inputs(args) +# logger.info("main: process_inputs done; repos=%d", len(all_states)) 
+ +# # Prepare tasks +# tasks: list[Task] = [] +# for (owner, repo), uniq in all_states.items(): +# limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) +# logger.debug("main: repo %s/%s -> %d sha(s) after limit", owner, repo, len(limited)) +# for sha in limited: +# tasks.append(Task(owner, repo, sha)) + +# logger.info("main: total tasks prepared=%d", len(tasks)) + +# (args.output_dir / "results").mkdir(parents=True, exist_ok=True) +# # reset outputs +# (args.output_dir / "errors.txt").unlink(missing_ok=True) +# (args.output_dir / "failures.jsonl").unlink(missing_ok=True) +# logger.debug("main: output directories prepared and old outputs cleared") + +# machine_defaults: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] +# machine_defaults = { +# k: str(v.replace(" ", "_").replace("'", "").replace('"', "")) for k, v in machine_defaults.items() +# } +# logger.debug("main: machine_defaults keys=%d", len(machine_defaults)) + +# single_task = tasks[0] if len(tasks) >= 1 else None +# if single_task is None: +# logger.info( +# "main: multi-task mode -> %d tasks with up to %d workers (exiting early by design)", +# len(tasks), +# args.max_workers, +# ) +# return + +# # Single task mode: useful for debugging +# logger.info("main: single-task mode for %s/%s@%s", single_task.owner, single_task.repo, single_task.sha) +# res = agent_build_and_validate( +# task=single_task, +# client=client, +# args=args, +# machine_defaults=machine_defaults, +# max_attempts=10, +# ) +# print(json.dumps(res, indent=2)) + + +# if __name__ == "__main__": +# from scratch.scripts.parallel_validate_containers import parse_args + +# args = parse_args() +# main(args) diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index c0e29c1..02b8ab1 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -1,17 +1,35 @@ from __future__ import annotations import io 
+import json import tarfile +import threading +import time +from collections import deque +from collections.abc import Mapping +from dataclasses import dataclass from pathlib import Path +from typing import Any import docker -from docker.errors import DockerException, ImageNotFound +from docker.errors import APIError, DockerException, ImageNotFound from datasmith.logging_config import get_logger logger = get_logger("docker.context") +@dataclass +class BuildResult: + ok: bool + image_name: str + image_id: str | None + rc: int # 0 ok, 124 timeout, 1 generic failure + duration_s: float + stderr_tail: str # tail of error-ish build logs + stdout_tail: str # tail of normal build stream (may help triage) + + class DockerContext: """ A docker context stores all the necessary files to build a docker container @@ -106,6 +124,156 @@ def build_container( if not client.images.get(image_name): raise RuntimeError(f"Image '{image_name}' failed to build and is not found.") + def build_container_streaming( # noqa: C901 + self, + client: docker.DockerClient, + image_name: str, + build_args: dict[str, str], + *, + force: bool = False, + timeout_s: int = 20 * 60, + tail_chars: int = 4000, + pull: bool = False, + ) -> BuildResult: + """ + SDK-only build with streamed logs, tail capture, and a wall-clock timeout. + Returns a BuildResult and does NOT raise for typical failures (so callers can + report immediately). + """ + t0 = time.time() + + # Fast path: respect existing image when not forcing + try: + img = client.images.get(image_name) + if force: + logger.info("Force rebuild requested. Removing '%s'.", image_name) + client.images.remove(image=img.id, force=True) + else: + logger.info("Docker image '%s' found locally (skip build).", image_name) + return BuildResult( + ok=True, + image_name=image_name, + image_id=img.id, + rc=0, + duration_s=time.time() - t0, + stderr_tail="", + stdout_tail="", + ) + except ImageNotFound: + logger.info("Docker image '%s' not found locally. 
Building.", image_name) + + # Streamed build via low-level API for better control + tar_stream = self.build_tarball_stream() + stdout_buf: deque[str] = deque(maxlen=2000) # chunk-tail buffers + stderr_buf: deque[str] = deque(maxlen=2000) + + # Pretty log line for transparency + if build_args: + build_args_str = " --build-arg ".join(f"{k}={v}" for k, v in build_args.items()) + logger.info("$ docker build -t %s . --build-arg %s", image_name, build_args_str) + else: + logger.info("$ docker build -t %s .", image_name) + + try: + stream = client.api.build( + fileobj=tar_stream, + custom_context=True, + tag=image_name, + buildargs=build_args, + decode=True, + rm=True, + pull=pull, + ) + except DockerException: + logger.exception("Failed to initiate build for '%s'", image_name) + return BuildResult( + ok=False, + image_name=image_name, + image_id=None, + rc=1, + duration_s=time.time() - t0, + stderr_tail="", + stdout_tail="", + ) + + error_seen = None + try: + for chunk in stream: + # Time check first + if time.time() - t0 > timeout_s: + error_seen = "[TIMEOUT]" + break + + # Typical keys: 'stream', 'status', 'error', 'errorDetail' + if chunk.get("stream"): + s = str(chunk["stream"]) + if s: + stdout_buf.append(s) + if "status" in chunk and chunk.get("progressDetail"): + # Status lines (pulling base layers, etc.)—treat as stdout + s = str(chunk.get("status", "")) + if s: + stdout_buf.append(s + "\n") + if "error" in chunk or "errorDetail" in chunk: + error_seen = (chunk.get("error") or str(chunk.get("errorDetail", ""))).strip() + if error_seen: + # also track in stderr tail + stderr_buf.append(error_seen + "\n") + break + except APIError: + logger.exception("Build stream APIError for '%s'", image_name) + error_seen = "APIError during build" + + duration = time.time() - t0 + + # Success path: ensure image exists + if not error_seen: + try: + img = client.images.get(image_name) + return BuildResult( + ok=True, + image_name=image_name, + image_id=img.id, + rc=0, + 
duration_s=duration, + stderr_tail="".join(stderr_buf)[-tail_chars:], + stdout_tail="".join(stdout_buf)[-tail_chars:], + ) + except ImageNotFound: + error_seen = "Build completed but image not found" + + # Failure + rc = 124 if error_seen == "[TIMEOUT]" else 1 + return BuildResult( + ok=False, + image_name=image_name, + image_id=None, + rc=rc, + duration_s=duration, + stderr_tail="".join(stderr_buf)[-tail_chars:] or (error_seen or "")[-tail_chars:], + stdout_tail="".join(stdout_buf)[-tail_chars:], + ) + + def to_dict(self) -> dict[str, str]: + """Return a JSON-serializable mapping of this context's contents.""" + return { + "dockerfile_data": self.dockerfile_data, + "entrypoint_data": self.entrypoint_data, + "building_data": self.building_data, + } + + @classmethod + def from_dict(cls, data: Mapping[str, Any]) -> DockerContext: + """ + Construct a DockerContext from a mapping. Missing keys fall back to the + default files via the DockerContext __init__ (which accepts None). + """ + return cls( + building_data=data.get("building_data"), + dockerfile_data=data.get("dockerfile_data"), + entrypoint_data=data.get("entrypoint_data"), + ) + class ContextRegistry: """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" @@ -114,6 +282,7 @@ def __init__(self, registry: dict[str, DockerContext] | None = None, default_con if registry is None: registry = {} self.registry = registry + self._lock = threading.Lock() if "default" not in self.registry: if default_context is None: @@ -121,6 +290,9 @@ def __init__(self, registry: dict[str, DockerContext] | None = None, default_con self.registry["default"] = default_context logger.debug("Default Docker context initialized.") + def get_lock(self) -> threading.Lock: + return self._lock + def register(self, key: str, context: DockerContext) -> None: """Register a new Docker context.""" if key in self.registry: @@ -165,3 +337,42 @@ def get(self, key: str) -> DockerContext: def __getitem__(self, key: str) -> 
DockerContext: return self.get(key) + + def save_to_file(self, path: Path) -> None: + dat = self.serialize(pretty=True) + path.write_text(dat) + logger.info("Context registry saved to %s", path) + + @classmethod + def load_from_file(cls, path: Path) -> ContextRegistry: + dat = path.read_text() + return cls.deserialize(dat) + + def serialize(self, *, pretty: bool = False) -> str: + """ + Serialize the registry (including the 'default' context) to a JSON string. + The thread lock itself is not serialized; a fresh lock will be created + when deserializing. + """ + with self._lock: + payload = { + "version": 1, + "contexts": {k: v.to_dict() for k, v in self.registry.items()}, + } + return json.dumps(payload, indent=2 if pretty else None, sort_keys=pretty) + + @classmethod + def deserialize(cls, payload: str) -> ContextRegistry: + """ + Reconstruct a ContextRegistry from a JSON string produced by `serialize`. + Ensures a 'default' context exists even if it wasn't present in the payload. + """ + data = json.loads(payload) + raw = data.get("contexts", {}) + registry: dict[str, DockerContext] = {k: DockerContext.from_dict(v) for k, v in raw.items()} + + # Ensure 'default' exists (your code expects it). + if "default" not in registry: + registry["default"] = DockerContext() + + return cls(registry=registry) diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index ee038a7..7f3e486 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -49,6 +49,7 @@ " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + export CFLAGS="$CFLAGS -Wno-error=incompatible-pointer-types" micromamba run -n "asv_${version}" pip install -e . 
scipy matplotlib done """.strip(), @@ -57,7 +58,6 @@ ), ) - CONTEXT_REGISTRY.register( "asv-scikit-learn-scikit-learn", DockerContext( @@ -114,6 +114,57 @@ ), ) +CONTEXT_REGISTRY.register( + "asv-scikit-learn-scikit-learn-8bc36080d9855d29e1fcbc86da46a9e89e86c046", + DockerContext( + building_data="""#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." + exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" + export CFLAGS="$CFLAGS 
-Wno-error=incompatible-pointer-types" + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) + CONTEXT_REGISTRY.register( "asv-nvidia-warp", @@ -273,11 +324,68 @@ " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy "cython<3" joblib threadpoolctl pytest compilers meson-python micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + # if maintainer/install_all.sh exists run it with develop + if [[ -f "maintainer/install_all.sh" ]]; then + micromamba activate "asv_${version}" + working_dir=$(pwd) + cd "$ROOT_PATH" || exit 1 + bash maintainer/install_all.sh develop + cd "$working_dir" || exit 1 + else + micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable . + fi +done +""".strip(), + dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + ), +) + + +CONTEXT_REGISTRY.register( + "asv-nobuild", + DockerContext( + building_data="""#!/usr/bin/env bash +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba activate "asv_${version}" - working_dir=$(pwd) - cd "$ROOT_PATH" || exit 1 - bash maintainer/install_all.sh develop - cd "$working_dir" || exit 1 + pip install git+https://github.com/airspeed-velocity/asv + pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" + # BUILD STEPS GO HERE. 
done """.strip(), dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, diff --git a/src/datasmith/docker/validation.py b/src/datasmith/docker/validation.py new file mode 100644 index 0000000..8249e3c --- /dev/null +++ b/src/datasmith/docker/validation.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import argparse +import contextlib +import logging +import shlex +import threading +from dataclasses import dataclass +from pathlib import Path + +import docker +from docker.models.containers import Container + +from datasmith.docker.context import BuildResult +from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.orchestrator import log_container_output + +logger = logging.getLogger(__name__) + +_err_lock = threading.Lock() + + +@dataclass(frozen=True) +class Task: + owner: str + repo: str + sha: str + + +def format_cmds(image_name: str, owner: str, repo: str, sha: str, out_dir: Path) -> tuple[str, str]: + build_cmd = ( + f"docker build -t {shlex.quote(image_name)} src/datasmith/docker/ " + f"--build-arg REPO_URL=https://www.github.com/{owner}/{repo} " + f"--build-arg COMMIT_SHA={sha}" + ) + run_cmd = ( + f"docker run --rm -v {shlex.quote(str((out_dir / 'results').absolute()))}:/output " + f"{shlex.quote(image_name)} asv run --quick --python=same --set-commit-hash={sha}" + ) + return build_cmd, run_cmd + + +def append_error_line(path: Path, text: str) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with _err_lock, open(path, "a") as f: + f.write(text.rstrip() + "\n") + + +def tail_chars(text: str | bytes, n: int) -> str: + if isinstance(text, bytes): + try: + text = text.decode("utf-8", errors="replace") + except Exception: + if isinstance(text, bytes): + text = text.decode("latin-1", errors="replace") + return str((text or "")[-n:]) + + +def wait_container_with_timeout(container: Container, timeout_s: int) -> tuple[int | None, bool]: + """ + Wait for container to exit; on timeout, stop it. 
Returns (exit_code or None, timed_out). + """ + code_box: dict[str, int | None] = {"code": None} + done = threading.Event() + + def _wait() -> None: + try: + res = container.wait() # blocking + code_box["code"] = res.get("StatusCode") + except Exception: + code_box["code"] = None + finally: + done.set() + + t = threading.Thread(target=_wait, daemon=True) + t.start() + finished = done.wait(timeout=timeout_s) + if finished: + return code_box["code"], False + + # Timeout: stop the container + with contextlib.suppress(Exception): + container.stop(timeout=10) + # Make a best-effort to fetch a code after stop + try: + res = container.wait(timeout=10) # docker-py may ignore timeout; best effort + return res.get("StatusCode"), True + except Exception: + return None, True + + +def validate_one(task: Task, args: argparse.Namespace, client: docker.DockerClient, machine_defaults: dict) -> dict: + """ + Build via Docker SDK streaming (with timeout), then run container (with timeout). + Emits errors immediately on failure (build or run). + Returns a structured dict for JSONL summarization. 
+ """ + image_name = f"asv-{task.owner}-{task.repo}-{task.sha}".lower() + docker_ctx = CONTEXT_REGISTRY[image_name] + + build_cmd, run_cmd = format_cmds(image_name, task.owner, task.repo, task.sha, args.output_dir) + + build_res: BuildResult = docker_ctx.build_container_streaming( + client=client, + image_name=image_name, + build_args={ + "REPO_URL": f"https://www.github.com/{task.owner}/{task.repo}", + "COMMIT_SHA": task.sha, + }, + force=True, # preserve your original behavior + timeout_s=args.build_timeout, + tail_chars=args.tail_chars, + pull=False, + ) + + if not build_res.ok: + msg = f"$ {build_cmd}\n$ {run_cmd}\n[build FAILED rc={build_res.rc} in {build_res.duration_s:.1f}s]" + if build_res.stderr_tail: + msg += f"\n---- build stderr tail ----\n{build_res.stderr_tail}" + append_error_line(args.output_dir / "errors.txt", msg) + logger.error(msg) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": "build", + "ok": False, + "rc": build_res.rc, + "duration_s": build_res.duration_s, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": build_res.stderr_tail, + "stdout_tail": build_res.stdout_tail, + "files": [], + } + + # --- RUN --- + # prepare env (clone default Machine args and set machine=sha) + machine_args = dict(machine_defaults) + machine_args["machine"] = task.sha + env = { + "ASV_ARGS": f"--quick --python=same --set-commit-hash={task.sha}", + "ASV_MACHINE_ARGS": " ".join([f"--{k}='{v}'" for k, v in machine_args.items()]), + } + + container = None + files = {} + try: + container = client.containers.run( + image=image_name, + detach=True, + name=f"{image_name}-validation", + environment=env, + volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, + ) + + # Wait with timeout; stop on timeout + exit_code, timed_out = wait_container_with_timeout(container, args.run_timeout) + + # Collect logs tail + try: + raw_logs = container.logs(stdout=True, 
stderr=True) + except Exception: + raw_logs = b"" + + logs_tail = tail_chars(raw_logs, args.tail_chars) + rc = 124 if timed_out else (exit_code if exit_code is not None else 1) + + # Archive logs/artifacts (your helper) + try: + files = log_container_output(container, archive="/output") + except Exception: + logger.exception("Failed to archive output for %s", image_name) + + ok = rc == 0 + if not ok: + msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED rc={rc} in (<= {args.run_timeout}s)]" + if logs_tail: + msg += f"\n---- run logs tail ----\n{logs_tail}" + append_error_line(args.output_dir / "errors.txt", msg) + logger.error(msg) + + return { # noqa: TRY300 + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": "run" if ok else "run-failed", + "ok": ok, + "rc": rc, + "duration_s": None, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": logs_tail, + "stdout_tail": "", + "files": files, + } + except Exception: + logger.exception("%s failed to run.", image_name) + msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED: exception during start]" + append_error_line(args.output_dir / "errors.txt", msg) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": "run-exception", + "ok": False, + "rc": 1, + "duration_s": None, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": "", + "stdout_tail": "", + "files": [], + } + finally: + # best-effort cleanup + with contextlib.suppress(Exception): + if container: + container.remove(force=True) From 3c99152bd9263e4331bcf39f69ec5c7bc60549bf Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 23 Aug 2025 09:17:59 +0000 Subject: [PATCH 10/20] auto-installer working pipe-flush single --- scratch/context_registry.json | 45 +++ scratch/scripts/benchmark_commits.py | 4 +- .../scripts/parallel_validate_containers.py | 4 +- scratch/scripts/synthesize_contexts.py | 34 +- scratch/scripts/validate_containers.py | 9 +- 
src/datasmith/agents/context_synthesis.py | 381 +++++------------- src/datasmith/agents/perf_judge.py | 1 - src/datasmith/docker/METHOD.md | 8 +- src/datasmith/docker/context.py | 169 ++++++-- src/datasmith/docker/context_registry.py | 152 +++---- src/datasmith/docker/orchestrator.py | 10 +- src/datasmith/docker/validation.py | 26 +- 12 files changed, 404 insertions(+), 439 deletions(-) create mode 100644 scratch/context_registry.json diff --git a/scratch/context_registry.json b/scratch/context_registry.json new file mode 100644 index 0000000..0bda59b --- /dev/null +++ b/scratch/context_registry.json @@ -0,0 +1,45 @@ +{ + "contexts": { + "Task(owner='astropy', repo='astropy', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\nROOT_PATH=${PWD}\ngit clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install -e . scipy matplotlib\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='default', repo='default', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone\n", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"asv_${version}\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n fi\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + } + }, + "version": 1 +} diff --git a/scratch/scripts/benchmark_commits.py b/scratch/scripts/benchmark_commits.py index bc24a75..7273ab9 100644 --- a/scratch/scripts/benchmark_commits.py +++ b/scratch/scripts/benchmark_commits.py @@ -12,6 +12,7 @@ import asv import pandas as pd +from datasmith.docker.context import ContextRegistry from datasmith.docker.orchestrator import ( build_repo_sha_image, get_docker_client, @@ -97,6 +98,7 @@ def main() -> None: args = parse_args() all_states = process_commits(args.filtered_commits) + context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) max_concurrency = ( args.max_concurrency if args.max_concurrency != -1 else max(4, math.floor(0.5 * (os.cpu_count() or 1))) @@ -122,7 +124,7 @@ def main() -> None: 
with ThreadPoolExecutor(max_workers=args.num_cores * 4) as pool: futures = [ - pool.submit(build_repo_sha_image, client, owner, repo, sha, args.force_rebuild) + pool.submit(build_repo_sha_image, client, context_registry, owner, repo, sha, args.force_rebuild) for owner, repo, sha in all_states ] for fut in as_completed(futures): diff --git a/scratch/scripts/parallel_validate_containers.py b/scratch/scripts/parallel_validate_containers.py index 5ce4ebe..e325918 100644 --- a/scratch/scripts/parallel_validate_containers.py +++ b/scratch/scripts/parallel_validate_containers.py @@ -9,6 +9,7 @@ import pandas as pd from datasmith.benchmark.collection import BenchmarkCollection +from datasmith.docker.context import ContextRegistry from datasmith.docker.orchestrator import get_docker_client from datasmith.docker.validation import Task, _err_lock, validate_one from datasmith.logging_config import configure_logging @@ -88,6 +89,7 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) + context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) # Prepare tasks tasks: list[Task] = [] @@ -110,7 +112,7 @@ def main(args: argparse.Namespace) -> None: results: list[dict] = [] with ThreadPoolExecutor(max_workers=args.max_workers) as ex: - futures = [ex.submit(validate_one, t, args, client, machine_defaults) for t in tasks] + futures = [ex.submit(validate_one, t, args, client, context_registry, machine_defaults) for t in tasks] for fut in as_completed(futures): rec = fut.result() results.append(rec) diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index 873d735..95477b0 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -1,6 +1,7 @@ from __future__ import annotations import argparse +import datetime import json from 
concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path @@ -10,7 +11,7 @@ from datasmith.agents.context_synthesis import agent_build_and_validate from datasmith.benchmark.collection import BenchmarkCollection -from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.context import ContextRegistry from datasmith.docker.orchestrator import get_docker_client from datasmith.docker.validation import Task, _err_lock from datasmith.logging_config import configure_logging @@ -58,15 +59,15 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() -def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[tuple[str, float]]]: if args.dashboard: dashboard = BenchmarkCollection.load(args.dashboard) all_states = {} for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): if (owner, repo) not in all_states: - all_states[(owner, repo)] = {sha} + all_states[(owner, repo)] = {(sha, 0.0)} else: - all_states[(owner, repo)].add(sha) + all_states[(owner, repo)].add((sha, 0.0)) elif args.commits: commits = pd.read_json(args.commits, lines=True) all_states = {} @@ -78,10 +79,13 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") continue owner, repo = repo_name.split("/") + commit_date_unix: float = ( + 0.0 if row.get("date", None) is None else datetime.datetime.fromisoformat(row["date"]).timestamp() + ) if (owner, repo) not in all_states: - all_states[(owner, repo)] = {(sha)} + all_states[(owner, repo)] = [(sha, commit_date_unix)] else: - all_states[(owner, repo)].add(sha) + all_states[(owner, repo)].append((sha, commit_date_unix)) else: raise ValueError("Either --dashboard or --commits must be provided.") return all_states @@ -90,13 +94,14 @@ def process_inputs(args: 
argparse.Namespace) -> dict[tuple[str, str], set[str]]: def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) + context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) # Prepare tasks tasks: list[Task] = [] for (owner, repo), uniq in all_states.items(): limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) - for sha in limited: - tasks.append(Task(owner, repo, sha)) + for sha, date in limited: + tasks.append(Task(owner, repo, sha, commit_date=date)) (args.output_dir / "results").mkdir(parents=True, exist_ok=True) # reset outputs @@ -113,7 +118,12 @@ def main(args: argparse.Namespace) -> None: if args.max_workers < 1: for t in tasks: res = agent_build_and_validate( - task=t, args=args, client=client, machine_defaults=machine_defaults, max_attempts=args.max_attempts + task=t, + args=args, + client=client, + context_registry=context_registry, + machine_defaults=machine_defaults, + max_attempts=args.max_attempts, ) results.append(res) with _err_lock, open(args.output_dir / "results.jsonl", "a") as jf: @@ -128,6 +138,7 @@ def main(args: argparse.Namespace) -> None: client=client, machine_defaults=machine_defaults, max_attempts=args.max_attempts, + context_registry=context_registry, ) for t in tasks ] @@ -139,9 +150,8 @@ def main(args: argparse.Namespace) -> None: if res["ok"]: logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) - # Add to CONTEXT_REGISTRY - with CONTEXT_REGISTRY.get_lock(): - CONTEXT_REGISTRY.save_to_file(path=Path("scratch/context_registry.json")) + with context_registry.get_lock(): + context_registry.save_to_file(path=Path("scratch/context_registry.json")) # Rollup (minimal, quick to read) rollup = { diff --git a/scratch/scripts/validate_containers.py b/scratch/scripts/validate_containers.py index e89d9ad..9360494 100644 --- a/scratch/scripts/validate_containers.py +++ 
b/scratch/scripts/validate_containers.py @@ -10,7 +10,7 @@ import pandas as pd from datasmith.benchmark.collection import BenchmarkCollection -from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.context import ContextRegistry from datasmith.docker.orchestrator import get_docker_client, log_container_output from datasmith.logging_config import configure_logging from datasmith.scrape.utils import _parse_commit_url @@ -83,6 +83,7 @@ def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) + context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] all_files_by_image = {} @@ -93,8 +94,8 @@ def main(args: argparse.Namespace) -> None: ) for (owner, repo), uniq_shas in all_states.items(): for sha in list(uniq_shas): - image_name = f"asv-{owner}-{repo}-{sha}".lower() - docker_ctx = CONTEXT_REGISTRY[image_name] + image_name = f"asv/{owner}/{repo}/{sha}".lower() + docker_ctx = context_registry[image_name] try: docker_ctx.build_container( client=client, @@ -111,7 +112,7 @@ def main(args: argparse.Namespace) -> None: container = client.containers.run( image=image_name, detach=True, - name=f"asv-{owner}-{repo}-{sha}-validation", + name=f"asv/{owner}/{repo}/{sha}", environment={ "ASV_ARGS": f"--quick --python=same --set-commit-hash={sha}", "ASV_MACHINE_ARGS": " ".join([f"--{k} '{v}'" for k, v in machine_args.items()]), diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index 639e160..656f8a9 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -1,20 +1,18 @@ -# auto_builder.py from __future__ import annotations import argparse import logging import pickle -import re import sys from dataclasses import dataclass +from datetime import datetime, timezone 
from pathlib import Path import docker import dspy from datasmith.agents.config import configure_agent_backends -from datasmith.docker.context import BuildResult, DockerContext -from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.context import BuildResult, ContextRegistry, DockerContext from datasmith.docker.validation import Task, validate_one logger = logging.getLogger(__name__) @@ -32,93 +30,13 @@ def _preview(s: str, n: int = 160) -> str: return s[:n] + ("..." if len(s) > n else "") -# -------------------------------------------------------------------------------------- -# Heuristic helpers: we extract hints from stderr/stdout and propose concrete patchlets. -# -------------------------------------------------------------------------------------- - - -@dataclass -class Patchlet: - name: str - snippet: str - rationale: str - - -COMMON_PATCHLETS: list[tuple[re.Pattern, Patchlet]] = [ - # PEP 517 / wheel build failures -> try modern build tools + (maybe) disable isolation - ( - re.compile(r"could not build wheels|PEP 517|pyproject\.toml build backend", re.I), - Patchlet( - "modern-build-stack", - 'micromamba run -n "asv_${version}" python -m pip install -U pip setuptools wheel build "meson-python"\n', - "Package uses pyproject/PEP 517; ensure modern build stack including meson-python.", - ), - ), - # C/C++/Fortran compile errors -> make sure compilers & basic headers exist and numpy cython pins are friendly - ( - re.compile(r"(gcc|clang|fortran|gfortran|fatal error:.*\.h: No such file|undefined reference|linker)", re.I), - Patchlet( - "compilers-cffi", - 'micromamba run -n "asv_${version}" python -m pip install "cython<3" "numpy<2" "scipy<1.14"\n', - "Typical compiled extension builds succeed with conservative numeric pins.", - ), - ), - # Meson detected but not installed - ( - re.compile(r"meson\.build|meson-python|meson\b", re.I), - Patchlet( - "meson-explicit", - 'micromamba run -n "asv_${version}" python -m pip install 
"meson" "meson-python"\n', - "Explicitly install meson if meson.build appears in logs.", - ), - ), - # Cython needed - ( - re.compile(r"cython (is )?required|\.pyx|pyximport", re.I), - Patchlet( - "cython-needed", - 'micromamba run -n "asv_${version}" python -m pip install "cython<3"\n', - "Project wants Cython at buildtime.", - ), - ), - # Old setuptools - ( - re.compile(r"error: invalid command .*bdist_wheel|setuptools\s*([0-9]+\.){1,2}[0-9]+.*too old", re.I), - Patchlet( - "setuptools-up", - 'micromamba run -n "asv_${version}" python -m pip install -U "setuptools>=60" wheel\n', - "Upgrade setuptools/wheel to unlock newer commands.", - ), - ), - # ResolutionImpossible -> try no-build-isolation (project's own constraints) - ( - re.compile(r"ResolutionImpossible|conflict|cannot resolve", re.I), - Patchlet( - "no-build-isolation", - "# Fallback to no-build-isolation to let project manage constraints\n" - 'micromamba run -n "asv_${version}" python -m pip install --no-build-isolation --editable "${ROOT_PATH}"\n', - "Let project control pins if resolver conflicts arise.", - ), - ), -] - - -def derive_patchlets(stderr_tail: str, stdout_tail: str) -> list[Patchlet]: - logger.debug( - "derive_patchlets: analyzing tails (stderr_len=%d, stdout_len=%d)", - len(stderr_tail or ""), - len(stdout_tail or ""), - ) - text = f"{stderr_tail}\n{stdout_tail}" - seen = set() - picks: list[Patchlet] = [] - for pattern, p in COMMON_PATCHLETS: - if pattern.search(text) and p.name not in seen: - seen.add(p.name) - picks.append(p) - logger.debug("derive_patchlets: matched pattern '%s' -> patchlet '%s'", pattern.pattern, p.name) - logger.info("derive_patchlets: selected %d patchlet(s): %s", len(picks), ", ".join([p.name for p in picks]) or "-") - return picks +def _ts_to_iso(ts: float | int | None) -> str: + if ts is None: + return "" + try: + return datetime.fromtimestamp(float(ts), tz=timezone.utc).isoformat().replace("+00:00", "Z") + except Exception: + return str(ts) class 
BuildScriptSynthesis(dspy.Signature): @@ -134,12 +52,19 @@ class BuildScriptSynthesis(dspy.Signature): """ # Inputs - owner = dspy.InputField(desc="GitHub owner/org, e.g. 'scikit-learn'.") - repo = dspy.InputField(desc="Repository name, e.g. 'scikit-learn'.") - sha = dspy.InputField(desc="Full commit SHA to build.") - log_tail = dspy.InputField(desc="Recent stderr/stdout tail (merged, up to ~8k chars).") + owner_repo = dspy.InputField(desc="The repository this commit belongs to. E.g. 'scikit-learn/scikit-learn'.") + sha = dspy.InputField(desc="The commit SHA that is currently checked out.") + commit_date = dspy.InputField(desc="The commit date in ISO format, e.g. '2023-10-05T12:34:56Z'.") + stderr_logs = dspy.InputField( + desc="The most recent stderr logs from the last build attempt. Upto ~8k tail-end chars." + ) + stdout_logs = dspy.InputField( + desc="The most recent stdout logs from the last build attempt. Upto ~8k tail-end chars." + ) + failure_more = dspy.InputField( + desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." 
+ ) last_building_data = dspy.InputField(desc="Previous building_data script; empty on attempt #1.") - heuristic_notes = dspy.InputField(desc="Bullet list of concrete shell lines/patchlets to consider.") expected_template = dspy.InputField(desc="Stable outer template; only BUILD STEPS may be customized.") # Output @@ -153,30 +78,32 @@ def __init__(self) -> None: def forward( self, - owner: str, - repo: str, + owner_repo: str, sha: str, - log_tail: str, + commit_date: str, + stderr_logs: str, + stdout_logs: str, + failure_more: str, last_building_data: str, - heuristic_notes: str, expected_template: str, ) -> str: logger.info( - "DSPy: synthesizing build script for %s/%s@%s (log_tail_len=%d, has_last=%s)", - owner, - repo, + "DSPy: synthesizing build script for %s@%s (stderr_len=%d, stdout_len=%d, has_last=%s, failure=%s)", + owner_repo, sha, - len(log_tail or ""), + len(stderr_logs or ""), + len(stdout_logs or ""), bool(last_building_data), + failure_more, ) - logger.debug("DSPy: heuristic notes: %s", _preview(heuristic_notes, 240)) out = self.predict( - owner=owner, - repo=repo, + owner_repo=owner_repo, sha=sha, - log_tail=log_tail, - last_building_data=last_building_data, - heuristic_notes=heuristic_notes, + commit_date=commit_date, + stderr_logs=stderr_logs or "", + stdout_logs=stdout_logs or "", + failure_more=failure_more or "N/A", + last_building_data=last_building_data or "", expected_template=expected_template, ) # Safety belt: ensure the required fixed template anchors are present. @@ -192,64 +119,6 @@ def forward( return script -# -------------------------------------------------------------------------------------- -# Template: stable outer shell. The agent fills the "BUILD STEPS" region only. -# -------------------------------------------------------------------------------------- - -BUILDING_TEMPLATE = """#!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . 
-type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} -eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." - exit 1 -fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba activate "asv_${version}" - # ---------- BUILD STEPS (agent should only modify this region) ---------- - # The agent will append/replace lines here based on logs and heuristics. - # first attempt: editable install with no build isolation for projects with bespoke pins - # Remove this line if you want to start from scratch. 
- python -m pip install --no-build-isolation --editable "${ROOT_PATH}" - # ---------- END BUILD STEPS ---------- -done -""".strip() - - -# -------------------------------------------------------------------------------------- -# Orchestrator -# -------------------------------------------------------------------------------------- - - @dataclass class AttemptRecord: attempt_idx: int @@ -269,28 +138,14 @@ def _save_pickle(ctx: DockerContext, path: Path) -> None: logger.info("Saved DockerContext pickle: %s", path.name) -def _context_from_script(building_data: str) -> DockerContext: - # Reuse default Dockerfile & entrypoint from the default context - base = CONTEXT_REGISTRY["default"] - logger.debug( - "_context_from_script: base dockerfile len=%d, entrypoint len=%d", - len(base.dockerfile_data or ""), - len(base.entrypoint_data or ""), - ) - logger.debug("_context_from_script: building_data preview: %s", _preview(building_data, 200)) - return DockerContext( - building_data=building_data, - dockerfile_data=base.dockerfile_data, - entrypoint_data=base.entrypoint_data, - ) - - def synthesize_script( program: BuildScriptProgram, task: Task, last_script: str, stderr_tail: str, stdout_tail: str, + building_template: str, + failure_more: str, ) -> str: logger.info( "synthesize_script: task=%s/%s@%s, last_script=%s", @@ -299,21 +154,18 @@ def synthesize_script( task.sha, "present" if last_script else "none", ) - patchlets = derive_patchlets(stderr_tail, stdout_tail) - notes = "" - if patchlets: - notes = "\n".join(f"- {p.rationale}\n {p.snippet.strip()}" for p in patchlets) merged_log = _merge_tail(stderr_tail, stdout_tail) - logger.debug("synthesize_script: merged_log_len=%d, notes_len=%d", len(merged_log), len(notes)) + logger.debug("synthesize_script: merged_log_len=%d", len(merged_log)) script = program( - owner=task.owner, - repo=task.repo, + owner_repo=f"{task.owner}/{task.repo}", sha=task.sha, - log_tail=merged_log, + commit_date=_ts_to_iso(getattr(task, "commit_date", 
None)), + stderr_logs=stderr_tail or "", + stdout_logs=stdout_tail or "", + failure_more=failure_more or "N/A", last_building_data=last_script or "", - heuristic_notes=notes or "(no extra hints)", - expected_template=BUILDING_TEMPLATE, + expected_template=building_template, ) script = str(script) logger.info("synthesize_script: script length=%d", len(script)) @@ -332,16 +184,6 @@ def build_once_with_context( pull: bool = False, ) -> BuildResult: logger.info("build_once_with_context: registering context key=%s", image_name) - with CONTEXT_REGISTRY.get_lock(): - CONTEXT_REGISTRY.register(image_name, context) - logger.debug( - "build_once_with_context: build args: REPO_URL=%s, COMMIT_SHA=%s, timeout_s=%s, tail_chars=%s, pull=%s", - repo_url, - sha, - timeout_s, - tail_chars, - pull, - ) res = context.build_container_streaming( client=client, image_name=image_name, @@ -351,43 +193,37 @@ def build_once_with_context( tail_chars=tail_chars, pull=pull, ) - logger.info( - "build_once_with_context: result ok=%s rc=%s duration=%.1fs (stderr_tail_len=%d, stdout_tail_len=%d)", - res.ok, - res.rc, - res.duration_s, - len(res.stderr_tail or ""), - len(res.stdout_tail or ""), - ) - logger.debug("build_once_with_context: stderr_tail preview: %s", _preview(res.stderr_tail, 240)) return res -@dataclass -class ArgsLike: - build_timeout: int - run_timeout: int - tail_chars: int - output_dir: Path - - def agent_build_and_validate( task: Task, args: argparse.Namespace, client: docker.DockerClient, machine_defaults: dict, + context_registry: ContextRegistry, max_attempts: int = 3, ) -> dict: """ Main entry: iteratively synthesize build script, build, and validate via your validate_one. Saves attempt pickles and final pickle on success. 
""" + assert task.sha is not None, "task.sha must be set" # noqa: S101 + other_contexts = context_registry.get_similar(task) + logger.info("agent_build_and_validate: found %d similar contexts", len(other_contexts)) + most_similar = other_contexts[0][1] if len(other_contexts) >= 1 else None + if most_similar and most_similar.building_data: + default_building_data = most_similar.building_data + else: + default_building_data = context_registry["asv/default/default"].building_data + logger.info( "agent_build_and_validate: start for %s/%s@%s (max_attempts=%d)", task.owner, task.repo, task.sha, max_attempts ) + program = BuildScriptProgram() - image_name = f"asv-{task.owner}-{task.repo}-{task.sha}".lower() + image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() repo_url = f"https://www.github.com/{task.owner}/{task.repo}" logger.debug("agent_build_and_validate: image_name=%s repo_url=%s", image_name, repo_url) @@ -397,24 +233,46 @@ def agent_build_and_validate( # Attempt loop for i in range(1, max_attempts + 1): logger.info("agent_build_and_validate: attempt %d/%d", i, max_attempts) + if i == 1: - script = synthesize_script(program, task, prior_script, stderr_tail="", stdout_tail="") + failure_more = "N/A" + script = synthesize_script( + program, + task, + prior_script, + stderr_tail="", + stdout_tail="", + building_template=default_building_data, + failure_more=failure_more, + ) else: last = attempts[-1].build_result + stderr_tail = (last.stderr_tail if last else "") or "" + stdout_tail = (last.stdout_tail if last else "") or "" + if last and last.rc == 124: + failure_more = "build timeout" + else: + failure_more = f"build failed rc={last.rc}" if last else "build failed" logger.debug( - "agent_build_and_validate: re-synthesis with last tails (stderr_len=%d, stdout_len=%d)", - len(last.stderr_tail or "") if last else 0, - len(last.stdout_tail or "") if last else 0, + "agent_build_and_validate: re-synthesis with last tails (stderr_len=%d, stdout_len=%d, 
failure=%s)", + len(stderr_tail), + len(stdout_tail), + failure_more, ) script = synthesize_script( program, task, attempts[-1].building_data, - stderr_tail=(last.stderr_tail if last else ""), - stdout_tail=(last.stdout_tail if last else ""), + stderr_tail=stderr_tail, + stdout_tail=stdout_tail, + building_template=default_building_data, + failure_more=failure_more, ) - ctx = _context_from_script(script) + ctx = DockerContext(building_data=script) + with context_registry.get_lock(): + context_registry.register(image_name, ctx) + # Save attempt pickle attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" _save_pickle(ctx, attempt_pickle) @@ -436,9 +294,10 @@ def agent_build_and_validate( # Save final pickle and then run full validation using your pipeline final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" _save_pickle(ctx, final_pickle) + context_registry.save_to_file(Path("scratch/context_registry.json")) logger.info("agent_build_and_validate: build succeeded; starting validation run") - result = validate_one(task, args, client, machine_defaults) + result = validate_one(task, args, client, context_registry, machine_defaults) logger.info( "agent_build_and_validate: validation stage=%s ok=%s rc=%s", result.get("stage"), @@ -467,9 +326,9 @@ def agent_build_and_validate( ) # All attempts failed + last = attempts[-1].build_result logger.error("agent_build_and_validate: all attempts failed for %s", image_name) - # merged_tail = _merge_tail(last.stderr_tail if last else "", last.stdout_tail if last else "") return { "owner": task.owner, "repo": task.repo, @@ -491,63 +350,3 @@ def agent_build_and_validate( ], "files": [], } - - -# def main(args: argparse.Namespace) -> None: -# from scratch.scripts.parallel_validate_containers import process_inputs - -# logger.info("main: starting auto_builder with args: %s", args) -# client = get_docker_client() -# logger.info("main: docker client acquired") - -# all_states 
= process_inputs(args) -# logger.info("main: process_inputs done; repos=%d", len(all_states)) - -# # Prepare tasks -# tasks: list[Task] = [] -# for (owner, repo), uniq in all_states.items(): -# limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) -# logger.debug("main: repo %s/%s -> %d sha(s) after limit", owner, repo, len(limited)) -# for sha in limited: -# tasks.append(Task(owner, repo, sha)) - -# logger.info("main: total tasks prepared=%d", len(tasks)) - -# (args.output_dir / "results").mkdir(parents=True, exist_ok=True) -# # reset outputs -# (args.output_dir / "errors.txt").unlink(missing_ok=True) -# (args.output_dir / "failures.jsonl").unlink(missing_ok=True) -# logger.debug("main: output directories prepared and old outputs cleared") - -# machine_defaults: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] -# machine_defaults = { -# k: str(v.replace(" ", "_").replace("'", "").replace('"', "")) for k, v in machine_defaults.items() -# } -# logger.debug("main: machine_defaults keys=%d", len(machine_defaults)) - -# single_task = tasks[0] if len(tasks) >= 1 else None -# if single_task is None: -# logger.info( -# "main: multi-task mode -> %d tasks with up to %d workers (exiting early by design)", -# len(tasks), -# args.max_workers, -# ) -# return - -# # Single task mode: useful for debugging -# logger.info("main: single-task mode for %s/%s@%s", single_task.owner, single_task.repo, single_task.sha) -# res = agent_build_and_validate( -# task=single_task, -# client=client, -# args=args, -# machine_defaults=machine_defaults, -# max_attempts=10, -# ) -# print(json.dumps(res, indent=2)) - - -# if __name__ == "__main__": -# from scratch.scripts.parallel_validate_containers import parse_args - -# args = parse_args() -# main(args) diff --git a/src/datasmith/agents/perf_judge.py b/src/datasmith/agents/perf_judge.py index b6e35de..4fb09c1 100644 --- a/src/datasmith/agents/perf_judge.py 
+++ b/src/datasmith/agents/perf_judge.py @@ -236,4 +236,3 @@ def get_response(self, message: str) -> tuple[bool, str]: # return (tp - 3*fn) - 0.5*fp # tele = BootstrapFewShot(metric=recall_weighted_metric, max_bootstrapped_demos=6, max_labeled_demos=6) # optimized = tele.compile(PerfClassifier(), trainset=train) # returns an optimized program -# import IPython; IPython.embed(header="perf_judge.py: debugging") diff --git a/src/datasmith/docker/METHOD.md b/src/datasmith/docker/METHOD.md index b56e9ef..c3d3b28 100644 --- a/src/datasmith/docker/METHOD.md +++ b/src/datasmith/docker/METHOD.md @@ -11,14 +11,12 @@ 4. Add the new docker_build.sh to the context registry. e.g.: ```python - CONTEXT_REGISTRY.register( - "asv-scikit-learn-scikit-learn", + context_registry.register( + "asv/scikit-learn/scikit-learn", DockerContext( building_data="""#!/usr/bin/env bash (The rest of the modified docker_build.sh script is omitted for brevity) - """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + """.strip() ), ) ``` diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index 02b8ab1..fcb108b 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -1,5 +1,6 @@ from __future__ import annotations +import datetime import io import json import tarfile @@ -14,6 +15,7 @@ import docker from docker.errors import APIError, DockerException, ImageNotFound +from datasmith.execution.utils import _get_commit_info from datasmith.logging_config import get_logger logger = get_logger("docker.context") @@ -30,6 +32,14 @@ class BuildResult: stdout_tail: str # tail of normal build stream (may help triage) +@dataclass(frozen=True) +class Task: + owner: str + repo: str + sha: str | None = None + commit_date: float = 0.0 + + class DockerContext: """ A docker context stores all the necessary files to build a docker container @@ -269,16 +279,16 @@ def from_dict(cls, 
data: Mapping[str, Any]) -> DockerContext: default files via the DockerContext __init__ (which accepts None). """ return cls( - building_data=data.get("building_data"), dockerfile_data=data.get("dockerfile_data"), entrypoint_data=data.get("entrypoint_data"), + building_data=data.get("building_data"), ) class ContextRegistry: """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" - def __init__(self, registry: dict[str, DockerContext] | None = None, default_context: DockerContext | None = None): + def __init__(self, registry: dict[Task, DockerContext] | None = None, default_context: DockerContext | None = None): if registry is None: registry = {} self.registry = registry @@ -287,57 +297,140 @@ def __init__(self, registry: dict[str, DockerContext] | None = None, default_con if "default" not in self.registry: if default_context is None: default_context = DockerContext() - self.registry["default"] = default_context + self.registry[Task(owner="default", repo="default", sha=None)] = default_context logger.debug("Default Docker context initialized.") def get_lock(self) -> threading.Lock: return self._lock - def register(self, key: str, context: DockerContext) -> None: + def parse_key(self, key: str) -> Task: + """Parse a string key into a Task object.""" + if not key.startswith("asv/") and not key.startswith("asv/default"): + raise ValueError("Key must start with 'asv/' or 'asv/default'") + + # Special "default" handling: e.g. 
"asv/default-" + if key.startswith("asv/default"): + parts = key.split("-") + repo = parts[-1] if len(parts) > 2 else "default" + return Task(owner="default", repo=repo, sha=None) + + parts = key.split("/") + if parts[0] != "asv" or not (3 <= len(parts) <= 4): + raise ValueError("Key must be in the format 'asv/owner/repo' or 'asv/owner/repo/sha'") + owner, repo = parts[1], parts[2] + sha = None if len(parts) != 4 else parts[3] + # Compute commit date if we have a sha; otherwise 0.0 + date_unix = 0.0 + if sha: + try: + logger.debug(f"Fetching commit info for {owner}/{repo}@{sha}") + commit_info = _get_commit_info(f"{owner}/{repo}", sha) + date_iso = commit_info["date"] + date_unix = datetime.datetime.fromisoformat(date_iso.replace("Z", "+00:00")).timestamp() + except Exception as exc: + logger.warning("Failed to fetch commit info for %s/%s@%s: %s", owner, repo, sha, exc) + date_unix = 0.0 + + return Task(owner=owner, repo=repo, sha=sha, commit_date=date_unix) + + def register(self, key: str | Task, context: DockerContext) -> None: """Register a new Docker context.""" + if isinstance(key, str): + key = self.parse_key(key) if key in self.registry: logger.warning(f"Context '{key}' is already registered, overwriting.") self.registry[key] = context logger.debug(f"Registered Docker context: {key}") - def get(self, key: str) -> DockerContext: + def get(self, key: str | Task) -> DockerContext: """ Retrieve a Docker context by key using hierarchical matching. - "asv-astropy-astropy-14134" should query these queries in-order: - "asv-astropy-astropy-14134" - "asv-astropy-astropy" + "asv/astropy/astropy/14134" should query these queries in-order: + "asv/astropy/astropy/14134" + "asv/astropy/astropy" """ - # Build candidate keys in the required order, deduplicated while preserving order. 
- candidates = [key] - - if "-" in key: - # e.g., "asv-owner-repo-sha" -> "asv-owner-repo" - owner_repo_key = key.rsplit("-", 1)[0] - candidates.append(owner_repo_key) - - # Preserve order but remove duplicates - seen = set() - ordered_candidates = [] - for c in candidates: - if c not in seen: - ordered_candidates.append(c) - seen.add(c) - - # Try each candidate in order - for candidate in ordered_candidates: - if candidate in self.registry: - if candidate == key: - logger.debug(f"Found exact context for key '{key}'.") - else: - logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") - return self.registry[candidate] - + if isinstance(key, str): + key = self.parse_key(key) + + if key.sha is not None and key in self.registry: + logger.debug(f"Found exact context for key '{key}'.") + return self.registry[key] + elif Task(owner=key.owner, repo=key.repo, sha=None) in self.registry: + candidate = Task(owner=key.owner, repo=key.repo, sha=None) + logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") + return self.registry[candidate] logger.info(f"No context found for key '{key}'. Using default context.") - return self.registry["default"] + return self.registry[Task(owner="default", repo="default", sha=None)] + + def get_similar(self, key: str | Task) -> list[tuple[Task, DockerContext]]: # noqa: C901 + """ + Retrieve a list of Docker contexts by key using hierarchical matching. + "asv/astropy/astropy/14134" should return contexts for these queries in-order: + 1) "asv/astropy/astropy/14134" (exact match, if present) + 2) Any others starting with "asv/astropy/astropy/" (e.g., "asv/astropy/astropy/abcdef") + sorted by abs(key.commit_date / candidate.commit_date) if key.commit_date is not None else alphabetically + 3) "asv/astropy/astropy" (owner/repo base, if present) + Keys like "asv/astropy/otherrepo*" or "asv/otherowner/*" must NOT match. 
+ """ + if isinstance(key, str): + key = self.parse_key(key) + + results: list[tuple[Task, DockerContext]] = [] + seen: set[Task] = set() + + # 1) Exact match first (if present) + if key in self.registry: + results.append((key, self.registry[key])) + seen.add(key) + + # 2) Other shas for the same owner/repo + candidates: list[tuple[Task, DockerContext]] = [] + for t, ctx in self.registry.items(): + if t in seen: + continue + if t.owner == key.owner and t.repo == key.repo and t.sha is not None: + candidates.append((t, ctx)) + + # Sort candidates: + # - By commit-date proximity if key has a (sha, commit_date) + # - Otherwise alphabetically by sha + has_valid_commit_date = getattr(key, "sha", None) is not None and getattr(key, "commit_date", None) is not None + + if has_valid_commit_date: + + def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: + t, _ = item + cand_cd = getattr(t, "commit_date", None) + # Missing commit_date gets sorted to the end. + if cand_cd is None: + return (float("inf"), str(t.sha)) + try: + return (abs(key.commit_date - cand_cd), str(t.sha)) + except Exception: + return (float("inf"), str(t.sha)) + + candidates.sort(key=_sort) + else: + candidates.sort(key=lambda item: str(item[0].sha)) + + for t, ctx in candidates: + if t not in seen: + results.append((t, ctx)) + seen.add(t) + + # 3) Base owner/repo (sha=None) at the end, if present and not already added + base = Task(owner=key.owner, repo=key.repo, sha=None) + if base in self.registry and base not in seen: + results.append((base, self.registry[base])) + + return results def __getitem__(self, key: str) -> DockerContext: return self.get(key) + def __setitem__(self, key: str, context: DockerContext) -> None: + self.register(key, context) + def save_to_file(self, path: Path) -> None: dat = self.serialize(pretty=True) path.write_text(dat) @@ -357,7 +450,7 @@ def serialize(self, *, pretty: bool = False) -> str: with self._lock: payload = { "version": 1, - "contexts": {k: v.to_dict() 
for k, v in self.registry.items()}, + "contexts": {repr(k): v.to_dict() for k, v in self.registry.items()}, } return json.dumps(payload, indent=2 if pretty else None, sort_keys=pretty) @@ -369,10 +462,10 @@ def deserialize(cls, payload: str) -> ContextRegistry: """ data = json.loads(payload) raw = data.get("contexts", {}) - registry: dict[str, DockerContext] = {k: DockerContext.from_dict(v) for k, v in raw.items()} + registry: dict[Task, DockerContext] = {eval(k): DockerContext.from_dict(v) for k, v in raw.items()} # noqa: S307 - # Ensure 'default' exists (your code expects it). + # Ensure 'default' exists: if "default" not in registry: - registry["default"] = DockerContext() + registry[Task(owner="default", repo="default", sha=None)] = DockerContext() return cls(registry=registry) diff --git a/src/datasmith/docker/context_registry.py b/src/datasmith/docker/context_registry.py index 7f3e486..a2c1960 100644 --- a/src/datasmith/docker/context_registry.py +++ b/src/datasmith/docker/context_registry.py @@ -1,5 +1,7 @@ from __future__ import annotations +from pathlib import Path + from datasmith.docker.context import ContextRegistry, DockerContext from datasmith.logging_config import get_logger @@ -8,7 +10,7 @@ CONTEXT_REGISTRY = ContextRegistry(default_context=DockerContext()) CONTEXT_REGISTRY.register( - "asv-astropy-astropy", + "asv/astropy/astropy", DockerContext( building_data="""#!/usr/bin/env bash @@ -49,17 +51,19 @@ " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + #### BUILD STEPS GO HERE. #### export CFLAGS="$CFLAGS -Wno-error=incompatible-pointer-types" micromamba run -n "asv_${version}" pip install -e . scipy matplotlib + #### BUILD STEPS END HERE. 
#### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv-scikit-learn-scikit-learn", + "asv/scikit-learn/scikit-learn", DockerContext( building_data="""#!/usr/bin/env bash @@ -106,16 +110,18 @@ micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv micromamba run -n "asv_${version}" pip install meson-python cython + #### BUILD STEPS GO HERE. #### micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} + #### BUILD STEPS END HERE. #### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv-scikit-learn-scikit-learn-8bc36080d9855d29e1fcbc86da46a9e89e86c046", + "asv/scikit-learn/scikit-learn/8bc36080d9855d29e1fcbc86da46a9e89e86c046", DockerContext( building_data="""#!/usr/bin/env bash cd_asv_json_dir() { @@ -155,19 +161,21 @@ " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + #### BUILD STEPS GO HERE. 
#### micromamba run -n "asv_${version}" pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" export CFLAGS="$CFLAGS -Wno-error=incompatible-pointer-types" micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} + #### BUILD STEPS END HERE. #### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv-nvidia-warp", + "asv/nvidia/warp", DockerContext( building_data=""" #!/usr/bin/env bash @@ -216,18 +224,20 @@ micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv micromamba run -n "asv_${version}" pip install meson-python cython + #### BUILD STEPS GO HERE. #### micromamba run -n "asv_${version}" python "${ROOT_PATH}/build_lib.py" micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} + #### BUILD STEPS END HERE. 
#### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv-python-control-python-control", + "asv/python-control/python-control", DockerContext( building_data=""" #!/usr/bin/env bash @@ -269,21 +279,23 @@ micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv micromamba run -n "asv_${version}" pip install meson-python cython + #### BUILD STEPS GO HERE. #### # if make_version exists run it if [[ -f "${ROOT_PATH}/make_version.py" ]]; then micromamba run -n "asv_${version}" python "${ROOT_PATH}/make_version.py" fi micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} + #### BUILD STEPS END HERE. #### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv-mdanalysis-mdanalysis", + "asv/mdanalysis/mdanalysis", DockerContext( building_data=""" #!/usr/bin/env bash @@ -324,6 +336,7 @@ " micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy "cython<3" joblib threadpoolctl pytest compilers meson-python micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + #### BUILD STEPS GO HERE. 
#### # if maintainer/install_all.sh exists run it with develop if [[ -f "maintainer/install_all.sh" ]]; then micromamba activate "asv_${version}" @@ -334,61 +347,64 @@ else micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable . fi + #### BUILD STEPS END HERE. #### done """.strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, + dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, + entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) -CONTEXT_REGISTRY.register( - "asv-nobuild", - DockerContext( - building_data="""#!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} -eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 -fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba activate "asv_${version}" - pip install git+https://github.com/airspeed-velocity/asv - pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" - # BUILD STEPS GO HERE. -done -""".strip(), - dockerfile_data=CONTEXT_REGISTRY["default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["default"].entrypoint_data, - ), -) +# CONTEXT_REGISTRY.register( +# "asv/default/nobuild", +# DockerContext( +# building_data="""#!/usr/bin/env bash +# cd_asv_json_dir() { +# local match +# match=$(find . -type f -name "asv.*.json" | head -n 1) + +# if [[ -n "$match" ]]; then +# local dir +# dir=$(dirname "$match") +# cd "$dir" || echo "Failed to change directory to $dir" +# else +# echo "No 'asv.*.json' file found in current directory or subdirectories." +# fi +# } +# eval "$(micromamba shell hook --shell=bash)" +# micromamba activate base + +# ROOT_PATH=${PWD} +# cd_asv_json_dir || exit 1 +# CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +# if [[ -z "$CONF_NAME" ]]; then +# echo "No 'asv.*.json' file found in current directory or subdirectories." 
+# exit 1 +# fi +# python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +# for version in $python_versions; do +# python -c "import asv, os, pathlib +# path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +# path.mkdir(parents=True, exist_ok=True) + +# config = asv.config.Config.load('$CONF_NAME') +# config.results_dir = str(path / 'results') +# config.html_dir = str(path / 'html') + +# asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +# asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +# " +# micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers +# micromamba activate "asv_${version}" +# pip install git+https://github.com/airspeed-velocity/asv +# pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" +# # BUILD STEPS GO HERE. 
+# done +# """.strip(), +# dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, +# entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, +# ), +# ) + +CONTEXT_REGISTRY.save_to_file(Path("scratch/context_registry.json")) diff --git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 2462c19..42d27a3 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -12,7 +12,7 @@ from docker.errors import DockerException, ImageNotFound from docker.models.containers import Container -from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.context import ContextRegistry from datasmith.logging_config import get_logger logger = get_logger("docker.orchestrator") @@ -50,9 +50,11 @@ def build_repo_image(client: docker.DockerClient, image_name: str, repo_url: str raise RuntimeError -def build_repo_sha_image(client: docker.DockerClient, owner: str, repo: str, sha: str, force: bool = False) -> str: - image_name = f"asv-{owner}-{repo}-{sha}" - docker_ctx = CONTEXT_REGISTRY[image_name] +def build_repo_sha_image( + client: docker.DockerClient, context_registry: ContextRegistry, owner: str, repo: str, sha: str, force: bool = False +) -> str: + image_name = f"asv/{owner}/{repo}/{sha}" + docker_ctx = context_registry[image_name] docker_ctx.build_container( client=client, image_name=image_name, diff --git a/src/datasmith/docker/validation.py b/src/datasmith/docker/validation.py index 8249e3c..cc9ffe5 100644 --- a/src/datasmith/docker/validation.py +++ b/src/datasmith/docker/validation.py @@ -5,14 +5,12 @@ import logging import shlex import threading -from dataclasses import dataclass from pathlib import Path import docker from docker.models.containers import Container -from datasmith.docker.context import BuildResult -from datasmith.docker.context_registry import CONTEXT_REGISTRY +from datasmith.docker.context import BuildResult, ContextRegistry, Task 
from datasmith.docker.orchestrator import log_container_output logger = logging.getLogger(__name__) @@ -20,13 +18,6 @@ _err_lock = threading.Lock() -@dataclass(frozen=True) -class Task: - owner: str - repo: str - sha: str - - def format_cmds(image_name: str, owner: str, repo: str, sha: str, out_dir: Path) -> tuple[str, str]: build_cmd = ( f"docker build -t {shlex.quote(image_name)} src/datasmith/docker/ " @@ -89,14 +80,21 @@ def _wait() -> None: return None, True -def validate_one(task: Task, args: argparse.Namespace, client: docker.DockerClient, machine_defaults: dict) -> dict: +def validate_one( + task: Task, + args: argparse.Namespace, + client: docker.DockerClient, + context_registry: ContextRegistry, + machine_defaults: dict, +) -> dict: """ Build via Docker SDK streaming (with timeout), then run container (with timeout). Emits errors immediately on failure (build or run). Returns a structured dict for JSONL summarization. """ - image_name = f"asv-{task.owner}-{task.repo}-{task.sha}".lower() - docker_ctx = CONTEXT_REGISTRY[image_name] + assert task.sha is not None, "Task.sha must be set" # noqa: S101 + image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() + docker_ctx = context_registry[image_name] build_cmd, run_cmd = format_cmds(image_name, task.owner, task.repo, task.sha, args.output_dir) @@ -150,7 +148,7 @@ def validate_one(task: Task, args: argparse.Namespace, client: docker.DockerClie container = client.containers.run( image=image_name, detach=True, - name=f"{image_name}-validation", + name=f"{image_name.replace('/', '-')}-validation", environment=env, volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, ) From 002f766ff6a57ac991d8d5feb1226ef5e6b5711c Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 23 Aug 2025 10:31:45 +0000 Subject: [PATCH 11/20] partially working autobuilder --- scratch/context_registry.json | 95 +++++++++++++ scratch/scripts/synthesize_contexts.py | 9 +- 
src/datasmith/agents/context_synthesis.py | 25 ++-- src/datasmith/docker/context.py | 3 +- src/datasmith/docker/validation.py | 156 +++++++++++++++------- 5 files changed, 231 insertions(+), 57 deletions(-) diff --git a/scratch/context_registry.json b/scratch/context_registry.json index 0bda59b..8239317 100644 --- a/scratch/context_registry.json +++ b/scratch/context_registry.json @@ -25,16 +25,111 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='caaa1f52a0632294bf951a9283d015f7b5dd5dd5', commit_date=1732650609.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='cbe8648c33b94bd919c35f4d1e2ae1c4432d9749', commit_date=1748364732.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d5df806c9715443f5dc7de9023a1b7aa2045eae4', commit_date=1677234005.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dc580a8ef5ee2a8aea80498388690e2213118efd', commit_date=1670501069.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e411c29625e66f7e440f1acce4069e01201cf122', commit_date=1672782103.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eae3f294d3ba8ae636730537faef4cdd612083ff', commit_date=1678119642.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ef200eb16813f4e579f3a4e6cd4603e16f72f5a8', commit_date=1680030341.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f86f41d80bff882689fc16bd7da1fef4a805b464', commit_date=1695653805.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f96ce586eecb361d53b192ea3b44098d1bd49a77', commit_date=1637843007.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha=None, commit_date=0.0)": { "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index 95477b0..a492418 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -128,6 +128,10 @@ def main(args: argparse.Namespace) -> None: results.append(res) with _err_lock, open(args.output_dir / "results.jsonl", "a") as jf: jf.write(json.dumps(res) + "\n") + + if int(res["rc"]) != 1: + logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) + context_registry.save_to_file(path=Path("scratch/context_registry.json")) else: with ThreadPoolExecutor(max_workers=args.max_workers) as ex: futures = [ @@ -148,10 +152,9 @@ def main(args: argparse.Namespace) -> None: with _err_lock, open(args.output_dir / "results.jsonl", "a") as jf: jf.write(json.dumps(res) + "\n") - if res["ok"]: + if int(res["rc"]) != 1: 
logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) - with context_registry.get_lock(): - context_registry.save_to_file(path=Path("scratch/context_registry.json")) + context_registry.save_to_file(path=Path("scratch/context_registry.json")) # Rollup (minimal, quick to read) rollup = { diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index 656f8a9..b4a23b0 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -3,7 +3,6 @@ import argparse import logging import pickle -import sys from dataclasses import dataclass from datetime import datetime, timezone from pathlib import Path @@ -16,11 +15,6 @@ from datasmith.docker.validation import Task, validate_one logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) -handler = logging.StreamHandler(stream=sys.stdout) -handler.setFormatter(logging.Formatter("[%(levelname)s] %(message)s")) -logger.addHandler(handler) - configure_agent_backends() @@ -184,6 +178,14 @@ def build_once_with_context( pull: bool = False, ) -> BuildResult: logger.info("build_once_with_context: registering context key=%s", image_name) + logger.debug( + "build_once_with_context: build args: REPO_URL=%s, COMMIT_SHA=%s, timeout_s=%s, tail_chars=%s, pull=%s", + repo_url, + sha, + timeout_s, + tail_chars, + pull, + ) res = context.build_container_streaming( client=client, image_name=image_name, @@ -193,6 +195,15 @@ def build_once_with_context( tail_chars=tail_chars, pull=pull, ) + logger.info( + "build_once_with_context: result ok=%s rc=%s duration=%.1fs (stderr_tail_len=%d, stdout_tail_len=%d)", + res.ok, + res.rc, + res.duration_s, + len(res.stderr_tail or ""), + len(res.stdout_tail or ""), + ) + logger.debug("build_once_with_context: stderr_tail preview: %s", _preview(res.stderr_tail, 240)) return res @@ -294,8 +305,6 @@ def agent_build_and_validate( # Save final pickle and then run full validation using your pipeline 
final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" _save_pickle(ctx, final_pickle) - context_registry.save_to_file(Path("scratch/context_registry.json")) - logger.info("agent_build_and_validate: build succeeded; starting validation run") result = validate_one(task, args, client, context_registry, machine_defaults) logger.info( diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index fcb108b..0805c2c 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -433,7 +433,8 @@ def __setitem__(self, key: str, context: DockerContext) -> None: def save_to_file(self, path: Path) -> None: dat = self.serialize(pretty=True) - path.write_text(dat) + with self._lock: + path.write_text(dat) logger.info("Context registry saved to %s", path) @classmethod diff --git a/src/datasmith/docker/validation.py b/src/datasmith/docker/validation.py index cc9ffe5..1267ff9 100644 --- a/src/datasmith/docker/validation.py +++ b/src/datasmith/docker/validation.py @@ -80,7 +80,95 @@ def _wait() -> None: return None, True -def validate_one( +def _handle_build_error( + task: Task, + build_cmd: str, + run_cmd: str, + build_res: BuildResult, + args: argparse.Namespace, + image_name: str, + build_stage: str, +) -> dict: + msg = f"$ {build_cmd}\n$ {run_cmd}\n[build FAILED rc={build_res.rc} in {build_res.duration_s:.1f}s]" + if build_res.stderr_tail: + msg += f"\n---- build stderr tail ----\n{build_res.stderr_tail}" + append_error_line(args.output_dir / "errors.txt", msg) + logger.error(msg) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": build_stage, + "ok": False, + "rc": build_res.rc, + "duration_s": build_res.duration_s, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": build_res.stderr_tail, + "stdout_tail": build_res.stdout_tail, + "files": [], + } + + +def _handle_run_error( + task: Task, + build_cmd: str, + run_cmd: str, + rc: int, 
+ logs_tail: str, + args: argparse.Namespace, + image_name: str, + run_stage: str, + build_stage: str, + files: dict[str, str], +) -> dict: + msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED rc={rc} in (<= {args.run_timeout}s)]" + if logs_tail: + msg += f"\n---- run logs tail ----\n{logs_tail}" + append_error_line(args.output_dir / "errors.txt", msg) + logger.error(msg) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": f"{run_stage}+{build_stage}", + "ok": False, + "rc": rc, + "duration_s": None, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": logs_tail, + "stdout_tail": "", + "files": files, + } + + +def _handle_run_exception( + task: Task, build_cmd: str, run_cmd: str, args: argparse.Namespace, image_name: str, build_stage: str +) -> dict: + logger.exception("%s failed to run.", image_name) + msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED: exception during start]" + append_error_line(args.output_dir / "errors.txt", msg) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": f"run-exception+{build_stage}", + "ok": False, + "rc": 1, + "duration_s": None, + "cmd_build": build_cmd, + "cmd_run": run_cmd, + "stderr_tail": "", + "stdout_tail": "", + "files": [], + } + + +def validate_one( # noqa: C901 task: Task, args: argparse.Namespace, client: docker.DockerClient, @@ -110,28 +198,15 @@ def validate_one( tail_chars=args.tail_chars, pull=False, ) + if build_res.rc == 124: + build_stage = "build-timeout" + elif build_res.rc != 0: + build_stage = "build-failed" + else: + build_stage = "build-ok" if not build_res.ok: - msg = f"$ {build_cmd}\n$ {run_cmd}\n[build FAILED rc={build_res.rc} in {build_res.duration_s:.1f}s]" - if build_res.stderr_tail: - msg += f"\n---- build stderr tail ----\n{build_res.stderr_tail}" - append_error_line(args.output_dir / "errors.txt", msg) - logger.error(msg) - return { - "owner": task.owner, - "repo": 
task.repo, - "sha": task.sha, - "image_name": image_name, - "stage": "build", - "ok": False, - "rc": build_res.rc, - "duration_s": build_res.duration_s, - "cmd_build": build_cmd, - "cmd_run": run_cmd, - "stderr_tail": build_res.stderr_tail, - "stdout_tail": build_res.stdout_tail, - "files": [], - } + return _handle_build_error(task, build_cmd, run_cmd, build_res, args, image_name, build_stage) # --- RUN --- # prepare env (clone default Machine args and set machine=sha) @@ -172,19 +247,27 @@ def validate_one( logger.exception("Failed to archive output for %s", image_name) ok = rc == 0 + + # set stage to "run-{failed/ok/timeout}" + "build-{failed/ok/timeout}" for clarity + run_stage = "run" + if timed_out: + run_stage += "-timeout" + elif not ok: + run_stage += "-failed" + else: + run_stage += "-ok" + if not ok: - msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED rc={rc} in (<= {args.run_timeout}s)]" - if logs_tail: - msg += f"\n---- run logs tail ----\n{logs_tail}" - append_error_line(args.output_dir / "errors.txt", msg) - logger.error(msg) + return _handle_run_error( + task, build_cmd, run_cmd, rc, logs_tail, args, image_name, run_stage, build_stage, files + ) return { # noqa: TRY300 "owner": task.owner, "repo": task.repo, "sha": task.sha, "image_name": image_name, - "stage": "run" if ok else "run-failed", + "stage": f"{run_stage}+{build_stage}", "ok": ok, "rc": rc, "duration_s": None, @@ -195,24 +278,7 @@ def validate_one( "files": files, } except Exception: - logger.exception("%s failed to run.", image_name) - msg = f"$ {build_cmd}\n$ {run_cmd}\n[run FAILED: exception during start]" - append_error_line(args.output_dir / "errors.txt", msg) - return { - "owner": task.owner, - "repo": task.repo, - "sha": task.sha, - "image_name": image_name, - "stage": "run-exception", - "ok": False, - "rc": 1, - "duration_s": None, - "cmd_build": build_cmd, - "cmd_run": run_cmd, - "stderr_tail": "", - "stdout_tail": "", - "files": [], - } + return _handle_run_exception(task, 
build_cmd, run_cmd, args, image_name, build_stage) finally: # best-effort cleanup with contextlib.suppress(Exception): From 913524d0c022ac630aa91ec236d9ca4679b66e77 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sun, 24 Aug 2025 04:46:24 +0000 Subject: [PATCH 12/20] added portkey --- pyproject.toml | 1 + scratch/context_registry.json | 1221 +++++++++++++++++++++++- scratch/scripts/synthesize_contexts.py | 2 +- src/datasmith/agents/config.py | 12 +- uv.lock | 32 + 5 files changed, 1247 insertions(+), 21 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0a36f74..a462925 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ dependencies = [ "gitpython", "numpy", "pandas", + "portkey-ai>=1.14.3", "requests", "ruptures", "simple-useragent", diff --git a/scratch/context_registry.json b/scratch/context_registry.json index 8239317..9ee14e6 100644 --- a/scratch/context_registry.json +++ b/scratch/context_registry.json @@ -10,81 +10,1131 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x 
/workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n 
\"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0030b3864eb77a90a9442904e7d64d1619c6add5', commit_date=1607478583.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n 
\"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0880dc18c211a6508240a43ff6fe618c9be7f568', commit_date=1617487191.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0b6b372fdfcdef15aacbe1c2b82d728f4f1c0401', commit_date=1607478582.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0f9a6e558a5798880c7b5604346a8a15826d0187', commit_date=1607980018.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package-requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r ${ROOT_PATH}/package-requirements.txt\n fi\n\n # Install optional dependencies that may be needed for tests\n micromamba run -n \"asv_${version}\" pip install pytest-xdist pytest-cov sphinx sphinx-sitemap sphinx-rtd-theme\n\n # Build and install the package\n cd ${ROOT_PATH}\n # First try pip install with --no-deps to avoid dependency conflicts\n if ! 
micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .; then\n # If that fails, try building wheel and installing\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install --no-deps dist/*.whl\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='108ffe0b19080b39975a93f947162f7371ac9144', commit_date=1539114837.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install project in development mode\n cd ${ROOT_PATH}\n if [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n fi\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='13a5df0fcbf13852da5613cefd84708e1fd506c6', commit_date=1618051027.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='18372f1526d8b0df776232504afe508ae8944b4c', commit_date=1696946134.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions below 3.9 as per error message\n if [[ \"$version\" < \"3.9\" ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n \n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\" build\n \n # Install additional dependencies from requirements files\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r package/requirements.txt\n fi\n \n # Build and install MDAnalysis from package directory\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e package/\n # Install dependencies after the package installation\n micromamba run -n \"asv_${version}\" pip install -e 
package/[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='1cd2b3b4f4d70c24c8de234d35ba1a7f900212c0', commit_date=1629565332.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \"cython>=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.71\" \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='1cfe404e5d2c2a807162d4e3d440b6969e14d87b', commit_date=1732305078.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build and test dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.36\" \"numpy>=1.21.0\" pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython tidynamics\n \n # Ensure build system requirements are met\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel\n \n # Look for package in subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Build and install MDAnalysis in development mode\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .[test,analysis]\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .[test,analysis]\n else\n echo \"Neither pyproject.toml nor setup.py found. 
Cannot install package.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='1eca65591fa402584dd29b6d1a02111af30e68eb', commit_date=1691706686.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install additional required packages\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n # Install requirements one by one to better handle failures\n while IFS= read -r requirement || [[ -n \"$requirement\" ]]; do\n # Skip empty lines and comments\n [[ -z \"$requirement\" || \"$requirement\" =~ ^#.*$ ]] && continue\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < package/requirements.txt\n fi\n \n # Build and install MDAnalysis\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f 
\"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='264d6f9357a978444baa1f99411a03453664ab2b', commit_date=1672867721.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n \n # Install build dependencies first\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install \"numpy<2.0.0\" cython\n \n # Install package requirements before build\n if [ -f \"package/requirements.txt\" ]; then\n # Create temporary requirements file with numpy constraint\n sed 
's/numpy>=2.0.0rc1/numpy<2.0.0/g' package/requirements.txt > temp_requirements.txt\n micromamba run -n \"asv_${version}\" pip install -r temp_requirements.txt\n rm temp_requirements.txt\n fi\n \n # Build and install package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='26880f0f0a4bb831fca9668650400858c34f442b', commit_date=1602889606.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='2ee4e9da5aa3a2c1b21fc3d1897bd70e0ab2064d', commit_date=1602770152.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis with optimized flags\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='35d9d2e3ab08e7e6741b57fe02a7215fe3b91a6c', commit_date=1742597504.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and tools\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install networkx matplotlib seaborn netCDF4 mmtf-python gsd biopython parmed griddataformats joblib threadpoolctl scikit-learn hypothesis codecov\n \n # Install MDAnalysis in editable mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='392c8ae5391e20f5e496f7ac03dae08c44deca3b', commit_date=1646727863.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest setuptools wheel build\n\n # Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n # First check if we're in the package directory, if not try to find it\n if [ ! -f \"setup.py\" ] && [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"setup.py\" ]; then\n # Install in editable mode with required build dependencies\n micromamba run -n \"asv_${version}\" pip install -e . 
--no-deps --no-build-isolation\n else\n echo \"Error: setup.py not found in expected locations\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='39b0e4cc184725cd0e5e710780c8154ed4de9f4f', commit_date=1524419705.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools>=45.0\" wheel \"cython>=0.29\" numpy\n \n # Install MDAnalysis in editable mode with specific dependencies for this older version\n micromamba run -n \"asv_${version}\" pip install --no-deps --verbose --editable \"${ROOT_PATH}\"\n \n # Install runtime dependencies appropriate 
for the 2018 version\n micromamba run -n \"asv_${version}\" pip install six mmtf-python mock biopython networkx gsd scipy matplotlib\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='404040598f78db05882fa5b2bba1d35fc6a30510', commit_date=1605754667.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='42c541771ab7aee318783d296caa3e10b33f53eb', commit_date=1613225552.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13,<3.0\" \"numpy>=1.16.0\" \"setuptools>=40.8.0\" wheel\n \n # Additional dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd matplotlib netcdf4 networkx\n \n # Build and install MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='4365f3b07c1bf2ebcf16424b26162102954c5b90', commit_date=1591777205.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl 
git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='45e56e8314c278e3eb98ed7a6029b74e7435e8be', commit_date=1598362533.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='474be5bbe32270bb9ddf02dc3cab74d3c1312c5e', commit_date=1728274662.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n \n # Check if we're in the right directory and if setup.py exists\n cd ${ROOT_PATH}\n if [ ! -f \"setup.py\" ] && [ ! 
-f \"pyproject.toml\" ]; then\n # Try to find the package directory\n if [ -d \"package\" ]; then\n cd package\n elif [ -d \"mdanalysis\" ]; then\n cd mdanalysis\n fi\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='481e36a3aa8767c4b895eabfd7ef8b89132ab611', commit_date=1723835551.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx tidynamics biopython\n \n # Check if we're in the right directory and if setup.py exists\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd \"${ROOT_PATH}/package\" || exit 1\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='4fafd51de84d5b89be0559a412acefde0040847c', commit_date=1726273184.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", + 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='52b3d261240efed0546d9f15ee42c7f445e72c13', commit_date=1693261706.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with version constraints for Python 3.8\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython\n \n # Install additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock biopython networkx gsd\n \n # Install package in development mode\n cd \"${ROOT_PATH}\"\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found. 
Attempting direct install.\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='559528f3349bebcaeb82e7f97fd6b76ae8aecce2', commit_date=1501861121.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install numpy and other build dependencies separately to handle version constraints\n if [[ \"$version\" == \"2.7\" ]]; then\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<1.17\" \"scipy<1.3\" \"cython<3.0\" pytest setuptools\n else\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<2.0\" \"scipy<2.0\" \"cython<3.0\" pytest setuptools\n fi\n \n # Install compilers and build tools\n micromamba install -y -n \"asv_${version}\" -c conda-forge compilers wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install 
git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5948963e0e9d92c9ddd0829ba3df3d9d496bbf01', commit_date=1672872621.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install --upgrade \"pip<24.0\" setuptools wheel\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\"\n \n # Install package requirements from package directory\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install with relaxed constraints and 
ignore errors\n sed 's/>=/~=/g' ${ROOT_PATH}/package/requirements.txt | grep -v \"numpy\" | micromamba run -n \"asv_${version}\" pip install -r /dev/stdin || true\n fi\n \n # Install additional dependencies needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"biopython~=1.80\" fasteners griddataformats\n \n # Install the package in development mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n else\n echo \"No pyproject.toml or setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set 
ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='59f4e395178240d5e3f36088d7a4d98ddd0e3607', commit_date=1680135568.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx-rtd-theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx 
tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5bf1979b36cd4d5f55d691e6927aa606fbeb8791', commit_date=1703619619.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install \"numpy>=1.21.0\" \"cython>=0.29.32\" \"mmtf-python>=1.0.0\" gsd biopython scipy pytest\n\n # Look for package directory containing setup.py\n cd \"${ROOT_PATH}\"\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n fi\n\n # 
Set environment variables to help with compilation\n export CFLAGS=\"-std=c99 -O3 -funroll-loops -fsigned-zeros\"\n export NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION\n\n # Build and install MDAnalysis with specific build settings\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5c19974c43125c94f98ab45d2f9965c70e427eec', commit_date=1541518721.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-deps \"cython>=0.16\" numpy>=1.10.0 biopython>=1.71 networkx>=1.0 griddataformats>=0.4.0 six>=1.4.0 fasteners mmtf-python>=1.0.0 tqdm>=4.43.0 packaging>=20.0 pytest>=3.3.0 mock\n \n # Build and install MDAnalysis in development mode with specific numpy version constraint\n micromamba run 
-n \"asv_${version}\" pip install --verbose --no-build-isolation \"numpy>=1.16.5,<2.0\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5cf8c5599e1a27c53e774c436b4e03fe71080f7a', commit_date=1534279531.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional dependencies required by MDAnalysis\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python tqdm mock joblib\n\n # Build and install MDAnalysis with appropriate flags\n if [[ \"$version\" == \"2.7\" ]]; then\n # For Python 2.7, use a more conservative installation approach\n micromamba run -n 
\"asv_${version}\" pip install --no-deps --editable ${ROOT_PATH}\n else\n # For Python 3.x, use build isolation disabled for better compatibility\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='61e236d45c52030d74ba6277c0a59e8a43a13ea9', commit_date=1593710203.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with pinned versions appropriate for 2020\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29,<0.30\" \"numpy>=1.13.0,<1.19\" \"biopython>=1.71,<1.78\" \\\n \"networkx>=1.0,<2.5\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0,<1.6\" \\\n \"joblib<1.0\" \"mock\" \"psutil<5.8\" \"pytest<6.0\" 
\"pytest-cov\" \"pytest-xdist<2.0\" \"hypothesis<6.0\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='62c35d49bd9458f2b5057d28d4904391a4a38513', commit_date=1534780584.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps 
--no-build-isolation --editable .\n\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='6bc52ec2f0744cdf3c63a2e43aff232381ec4dd1', commit_date=1669766518.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n\n # Ensure we're in the package directory\n cd ${ROOT_PATH}/package || cd ${ROOT_PATH}\n\n # Try to build and install MDAnalysis\n if [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither setup.py nor pyproject.toml found in current directory\"\n exit 1\n fi\n\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='6d5ef34292899958ea2a0148388ecc47cf499da1', commit_date=1620729923.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='73cd1e69be88f1b47b1327c1918c0ad326bec302', commit_date=1603501474.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install build dependencies \n micromamba install -y -n \"asv_${version}\" -c conda-forge numpy scipy cython pytest compilers setuptools pip wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='740cae26820eba538f9990ec904adc9f39a65b2e', commit_date=1619881090.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='740e74e8c61ea01a4b2120bd369b11a58cb9c304', commit_date=1728331627.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create base environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \\\n cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme \\\n mmtf-python mock gsd griddataformats tidynamics \\\n setuptools wheel build\n\n # Look for package subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n\n # Try to build and install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found in current directory\"\n exit 1\n fi\n cd ${ROOT_PATH}/benchmarks\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG 
COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='79dead30cc19cd821617a6746663a68709b276e0', commit_date=1754497815.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python ninja cython packaging\n # Build and install MDAnalysis with meson\n cd ${ROOT_PATH}\n # Ensure we're in the package directory with pyproject.toml\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n fi\n micromamba run -n \"asv_${version}\" python -m pip install --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='7c468a46344d17f91d44059332fcc533dad01cde', commit_date=1567026117.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython networkx matplotlib gsd griddataformats tidynamics\n \n # Install package in development mode with explicit build step\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='81b8ef51e5bc1aa2824294ac6c52818c74975658', commit_date=1741727282.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python mock gsd griddataformats scipy matplotlib biopython networkx tidynamics\n\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"No setup files found in expected locations. 
Please check repository structure.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='84ee67b99fc3bf165d2f58057fac3315d8bb33af', commit_date=1727431157.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install packaging\n \n # Try to find and build from package directory\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n fi\n else\n echo \"Package directory not found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get 
update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='8599e47b77a89486a1ffe97a3f146751611d9595', commit_date=1680132537.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH 
\\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='8c3577f5a72bee654d94367e4bef51791ffa5d0b', commit_date=1591177328.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six gsd mmtf-python networkx matplotlib biopython griddataformats GridDataFormats scipy tqdm joblib mock\n\n # Install package in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='93c4a97761469a2fd013c280d04435ae178f2c44', commit_date=1693273052.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions < 3.9 as MDAnalysis requires Python 3.9+\n if [[ $(echo \"$version\" | cut -d. -f2) -lt 9 ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm scipy biopython\n\n # Ensure we're in the root directory\n cd ${ROOT_PATH}\n \n # Look for package subdirectory\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Try to build and install the package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n 
\"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\n \n # Install remaining dependencies\n micromamba run -n \"asv_${version}\" pip install -e .[test,doc]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='95fedb590d1afd268c0a643302cd703b8756f5d3', commit_date=1685194826.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Check Python version requirement (MDAnalysis needs >=3.9)\n if (( $(echo \"$version\" | cut -d. 
-f1,2 | sed 's/\\.//' | bc) < 39 )); then\n echo \"Skipping Python $version as MDAnalysis requires Python >=3.9\"\n continue\n fi\n\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \\\n numpy scipy cython pytest compilers \\\n gsd networkx matplotlib tqdm pandas\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n\n # Install build dependencies and package\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e \".[test,analysis]\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo 
pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='9a2cd43fccd4426f91b195ea9902e5b78a6c2e3b', commit_date=1710090427.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel matplotlib pandas\n micromamba run -n \"asv_${version}\" pip install GridDataFormats mmtf-python networkx fasteners mda-xdrlib 
waterdynamics pathsimanalysis mdahole2\n \n # Install the package in editable mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='9ba1ab964920acfc986d8e264f78c965e062e9d0', commit_date=1511010257.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n # Using older versions since this is a 2017 commit\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<=1.13\" \"scipy<=1.0\" \"cython<=0.27\" setuptools wheel pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"sphinx<1.7\" \"sphinx_rtd_theme<0.3\"\n \n cd ${ROOT_PATH}\n # Build and install MDAnalysis with optimizations disabled to avoid timeouts\n CFLAGS=\"-O0\" CXXFLAGS=\"-O0\" 
micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a1bca526f473325f91c12fb15c887243a2a9244b', commit_date=1646736472.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode with older Cython version\n cd ${ROOT_PATH}\n # First install the core package\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable package/\n # Then install the test suite\n micromamba run 
-n \"asv_${version}\" pip install --no-build-isolation --editable testsuite/\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a3672f216aa162f2549d1712fad0118b2cc98d49', commit_date=1734398599.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install gsd mmtf-python networkx scipy tqdm packaging matplotlib biopython griddataformats\n 
\n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a6034750dc47c8904a297efa184292c73c0690bb', commit_date=1692115614.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy \"packaging<22\" pytest\n \n # Install package in development mode\n if [ -f \"package/setup.py\" ]; then\n cd package\n fi\n \n # Try to build and install\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n \n 
# Return to root directory\n cd ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a64eed98b38307e4699b59eef9f265cbead37ad6', commit_date=1607980019.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with explicit numpy dependency\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable .\n micromamba run -n \"asv_${version}\" pip install numpy scipy\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a6edec02af44fbb4589ef1da25a54a4cc8895ee4', commit_date=1671201733.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd netcdf4 bzip2 mmtf-python\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install pytest-xdist sphinx sphinx_rtd_theme\n\n # Ensure we're in the root directory before building\n cd \"${ROOT_PATH}\"\n\n # Build and install MDAnalysis with optimizations\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n \n # First try pyproject.toml-based install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n # Fallback to setup.py if exists\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n 
else\n echo \"Neither pyproject.toml nor setup.py found. Checking package subdirectories...\"\n # Check for package subdirectories\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n cd \"${ROOT_PATH}\"\n elif [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" python setup.py develop\n cd \"${ROOT_PATH}\"\n else\n echo \"No installation method found. Build failed.\"\n exit 1\n fi\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='aaa4456db50e237cf580c8c986c00d7c5fbe3075', commit_date=1703622753.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n\n # Navigate to package directory if needed\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd ${ROOT_PATH}/package || exit 1\n fi\n \n # Build and install MDAnalysis with specific compiler flags and additional dependencies\n export CFLAGS=\"-DXDR_GETPOS_RETURNS_UINT32=1 -DXDR_SETPOS_RETURNS_INT=1\"\n # Try installing with conda-forge compilers first\n micromamba install -y -n \"asv_${version}\" -c conda-forge gcc gxx\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b3208b39aab61be53f8b610f1fef628f83262205', commit_date=1725909222.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n # Try to find and use setup.py in package subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n # Install MDAnalysis in development mode with verbose output\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b5ba8278b3e09b80109aa06f77832be00f8752f0', commit_date=1510724778.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # For older versions of MDAnalysis, build_ext is needed before install\n micromamba run -n \"asv_${version}\" python setup.py build_ext 
--inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b672b595b57f6862d486391d646cf30c31fd8501', commit_date=1598490143.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy\n \n # Build and install MDAnalysis with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n 
PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b7f36bd148f1eed47f2dc935b89d28c8cae468c4', commit_date=1541446943.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install build dependencies first\n if [ -f \"${ROOT_PATH}/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n fi\n \n # Build and install MDAnalysis with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='bc95e31af1bd1a583161318ab381d005452d48ea', commit_date=1611524871.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='bdb1352f4743aa2101ba2d6b3c9c4fbeb5ae8584', commit_date=1680212962.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest 
\\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='be4b6ee8fa243a0d9e18b936a3d018f2b7418914', commit_date=1650356257.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid Cython errors\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython==0.29.36\" \"numpy<2.0.0\"\n\n # Install required dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"mmtf-python>=1.0.0\" \\\n \"networkx>=2.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.5.0\" \\\n \"biopython>=1.80\" \\\n \"griddataformats>=0.4.0\" \\\n \"packaging\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\"\n\n # Build and install MDAnalysis\n cd \"${ROOT_PATH}\"\n if [ -f \"package/setup.py\" ]; then\n cd package\n # Use --no-build-isolation to ensure our carefully installed dependencies are used\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n elif [ -f \"setup.py\" ]; 
then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c07b5c8897688d778e57e1ef34be86f58c969fe7', commit_date=1607478583.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext 
--inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c3289d8994936ce7dbe7842e8877d597ca96360a', commit_date=1752273263.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c565b9d3a11508604a1217e37199ac17a8c618f2', commit_date=1654106359.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython matplotlib tqdm pandas tidynamics\n\n # Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n # Use build system if pyproject.toml exists\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n # Use setup.py if available\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py or pyproject.toml found in package root directory.\"\n # Try looking in package subdirectory\n if [ 
-d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Could not find build configuration. Cannot build package.\"\n exit 1\n fi\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c56e8df543e1aba21959a7c7b3029eacd57d9130', commit_date=1661799771.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<2.0.0\" scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n\n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3,<4.0.0\" \\\n \"mock>=3.0.5\" \\\n \"packaging>=20.4\" \\\n \"pytest-xdist>=1.31.0\" \\\n \"pytest-cov>=2.10.1\" \\\n \"pytest-timeout>=1.4.2\" \\\n \"hypothesis>=5.19.0\" \\\n \"psutil>=4.3.1\" \\\n \"biopython>=1.80\" \\\n \"duecredit>=0.9.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"tqdm>=4.43.0\" \\\n \"joblib>=0.12\" 
\\\n \"fasteners>=0.15\" \\\n \"networkx>=2.0\" \\\n \"threadpoolctl>=2.0.0\"\n\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Build and install MDAnalysis in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c5cbc2551c1175e8d13887783c7ab2894607ac92', commit_date=1671293813.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies that might be needed for compilation\n micromamba install -y -n \"asv_${version}\" -c conda-forge gsd netcdf4 bzip2 
gcc gxx\n \n # Install MDAnalysis with verbose output and no build isolation\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c620b141f018628356bb9cdd16eefa640b6080ba', commit_date=1671200774.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n \"numpy<2.0\" \\\n \"cython<3.0\" \\\n setuptools \\\n wheel \\\n pip \\\n build\n\n # Try building and installing from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n 
micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c6f1a5a5663913f00cc5f727ad0e662bbf23f18f', commit_date=1617010037.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\" setuptools wheel\n \n # Build and install MDAnalysis in development mode with specific flags\n micromamba 
run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c70504d99e8b6ff7f61778cff1f5956da708ddad', commit_date=1619628547.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps 
--no-build-isolation --editable .\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c815614b5ae8ed86eaa0d68e10451fde7e72242b', commit_date=1671293292.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with compatible versions\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install requirements one by one to handle dependencies better\n while IFS= read -r requirement; do\n micromamba run -n 
\"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < \"${ROOT_PATH}/package/requirements.txt\"\n fi\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='cb05695ca422c216406a0eae4040c782a2a03812', commit_date=1629822068.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel cython numpy scipy\n \n # Install optional dependencies that might be needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps matplotlib networkx gsd biopython\n \n # Install the package in editable mode with test dependencies\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='d2e22ffb0cb46af5266e39b940d7f00c1ca293c1', commit_date=1534167809.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip 
install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='d73b653f19e8446bbb9de51bb41d71f78d148d30', commit_date=1534803427.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis with test dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='da77f2bead8c4a634d2ba5b61cd7d7f841c01c0b', commit_date=1671205345.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist\n # Build and install MDAnalysis using setup.py since pyproject.toml is not found\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='daee516f23ead8e42c2e42b7636f9ec243ab306e', commit_date=1603119467.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl 
git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='dcfa60a2ee0bcee7f54e969666950941905d825a', commit_date=1621773545.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e213f2be8e8741efc7cdddd35dc4bd2d88e0ff85', commit_date=1745000938.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n \n # Install package-specific dependencies\n cd ${ROOT_PATH}/package\n if [ -f \"requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\n fi\n \n # Build and install package\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in package directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C 
/usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e3966303776577e15a043daeceff5a591370398a', commit_date=1534255980.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install additional build dependencies\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python mock joblib\n \n # Install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e8fbd529fc55cb187d38bdef141d74757f22bdc5', commit_date=1594518308.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='eab18cb8418ddb1dd72b44f474833de4a2999884', commit_date=1654100638.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n mmtf-python \\\n networkx \\\n scipy \\\n matplotlib \\\n tqdm \\\n \"cython<3.0\" \\\n \"numpy<2.0\" \\\n pip \\\n setuptools \\\n wheel \\\n build\n\n # Try building and installing with specific C compiler flags\n cd ${ROOT_PATH}\n export CFLAGS=\"-fcommon\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v .\n\n # If that fails, try alternative installation method\n if [ $? 
-ne 0 ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='eae5845cf5488ae1db1cdcc2075f68406291721e', commit_date=1517964764.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six mmtf-python mock biopython networkx gsd joblib setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", 
+ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='ee4759293e1a4a5109c6b66e133acb1af7d24b0d', commit_date=1567703043.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='f5e9603f35b1e1587c1a1583793374fbfa0f80c5', commit_date=1629232880.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"cython>=0.29.13\" \\\n \"numpy>=1.16.0\" \\\n \"biopython>=1.74\" \\\n \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.0.0\" \\\n \"joblib\" \\\n \"mock\" \\\n \"packaging\" \\\n \"pytest\" \\\n \"pytest-xdist\" \\\n \"pytest-cov\" \\\n \"pytest-timeout\" \\\n \"psutil\" \\\n \"hypothesis\" \\\n \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\" \\\n \"duecredit\"\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get 
install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='f7a6f47e9c8c4637770c2c0cc0c20da841d11622', commit_date=1516881817.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools pip wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics six\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fb9e0bc786b21c15cefe0027fc83a441e1b19950', commit_date=1685186356.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm pandas biopython griddataformats scipy\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n \n # Build and install MDAnalysis\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fe22dc3794f1f5d466f9128e4c7050fa0d58e62f', commit_date=1619962288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install cython numpy setuptools wheel\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fed8be34a3434a621bacd438d2f9307139a24511', commit_date=1511384425.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Skip Python 2.7 as it's not available in conda-forge anymore\n if [[ \"$version\" == \"2.7\" ]]; then\n echo \"Skipping Python 2.7 as it's no longer supported\"\n continue\n fi\n\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy>=1.16\" \"scipy>=1.5\" cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies for the 2017 commit\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.16\" \"biopython>=1.71\" \\\n \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"six>=1.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \\\n \"mock>=2.0.0\" \"psutil>=4.0.0\" \"fasteners>=0.12.0\" \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \"packaging>=20.0\" \"pytest>=3.3.0\" \"pytest-xdist>=1.4.0\" \"pytest-cov>=2.5.1\"\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # Use older build approach appropriate for 2017 commit\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n 
micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='ff7ffa10901e2df2be12c3d3dd78e4e0a262e90e', commit_date=1614816697.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with specific version constraints\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.74\" \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.9.3\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \\\n \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" 
\"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode with explicit numpy requirement\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='mdanalysis', repo='mdanalysis', sha=None, commit_date=0.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"asv_${version}\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n fi\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "Task(owner='nvidia', repo='warp', sha='5495dc762dae2f09b648588d0f979e03ea3ef88b', commit_date=1741386626.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --verbose --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='626fc946dcfe2150b6aed956c57e89ec907ca44a', commit_date=1746035128.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Build and install WARP with CUDA support\n CUDA_PATH=/usr/local/cuda micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='9958a89058d16e7ac634c46b37d9aad6c14b3f10', commit_date=1740864850.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install torch cuda-python\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" python build_lib.py\n # Now install in editable mode\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='a447d70c372b4dbe1b574ebf587c51c9742272db', commit_date=1748714623.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build steps\n micromamba run -n \"asv_${version}\" pip install warp-lang\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Now try the editable install\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='a81f7e773f2905e06fe52262002c2e34a5daa4d8', commit_date=1743362346.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy cmake ninja pytest\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='d641e89a288746c380ef9b4871f45b0d862fd69e', commit_date=1755703901.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n  echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n  exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n  python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n  micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n  micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n  micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n  # WARP specific dependencies and build requirements\n  micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n  # First run build_lib.py to generate required libraries\n  cd ${ROOT_PATH}\n  # Add missing climits header to fix build error\n  sed -i '1i\\#include <climits>' warp/native/bvh.cpp\n  micromamba run -n \"asv_${version}\" python build_lib.py\n  # Then install WARP without CUDA support since error suggests basic build issues first\n  micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\ndone",
      "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n    apt-get install -y --no-install-recommends \\\n        curl git build-essential jq cmake ninja-build && \\\n    rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n    | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='dc693d89d5b85ac7e72c7f4e226eb58a5d54131f', commit_date=1751384285.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with CUDA support\n micromamba create -y -n \"asv_${version}\" -c conda-forge -c nvidia \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \\\n pytest ninja cmake cuda-toolkit=11.8 cuda-nvcc=11.8 cuda-libraries=11.8\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n\n # Set up CUDA environment\n export CUDA_PATH=/opt/conda/envs/asv_${version}\n export PATH=$CUDA_PATH/bin:$PATH\n export LD_LIBRARY_PATH=$CUDA_PATH/lib64:$LD_LIBRARY_PATH\n\n # Create necessary symlinks for CUDA headers\n mkdir -p /usr/local/cuda/include\n if [ -d \"$CUDA_PATH/include\" ]; then\n ln -sf $CUDA_PATH/include/* /usr/local/cuda/include/\n fi\n\n # Build and install WARP\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG 
COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='0422c82a80b3ec0dc7fcbc69562f99e35358ee80', commit_date=1680293750.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local 
match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='283f5e7480a7c39f0e11abe63e3c1ecd8b5d8911', commit_date=1616243491.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file to fix the invalid version error and setup.cfg\n mkdir -p \"${ROOT_PATH}/control\"\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n echo \"[metadata]\nversion = 0.0.0.dev0\" > \"${ROOT_PATH}/setup.cfg\"\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n\n # Try installing in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='2ce4bbd983ce00aa2998bce00c7c161ff7c0f1d5', commit_date=1640530701.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file since setup.py fails due to invalid version\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='4ef15c4e95ec73cf5fc4d571be103e67b00caadf', commit_date=1647713524.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required build dependencies\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib setuptools wheel\n\n # Fix the version in setup.py before installing\n sed -i 's/version='\"'\"'dev'\"'\"'/version='\"'\"'0.0.0.dev0'\"'\"'/' \"${ROOT_PATH}/setup.py\"\n \n # Build and install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='82f3fe343422289f076d6883a2448d169606f821', commit_date=1701474288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='a042895507367a5d001af7d3febfd8f386497554', commit_date=1739343810.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='a111b03e651d7c1828d264c1b143d9ccc9030b3f', commit_date=1640969033.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='abeb0e46a3d56c98b4534f73202a5a7ef5a0af87', commit_date=1751727883.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='c3c659638fb22bde11e40868f80f540060c50b40', commit_date=1616196419.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='f7d18f17bf90bfb99a06648982b22d1e4af6ccd2', commit_date=1686374157.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='044f1b00a62c9083ce3212a3e69046c9afac0de6', commit_date=1662470783.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='04860335c82d557e663b4cfa218663d1c7bf65fd', commit_date=1689974588.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='05ce8141bc71ad21e55be4d1b3f6609f65e91e49', commit_date=1603277025.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='06e566eb86cfd8c6107cf3bc2b477c97b80002a3', commit_date=1705578508.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='08b6157b0e18480569a5cc08efd44dabad9e60ce', commit_date=1701071115.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='092caed407f3b60de7677d4353bfe0db20a2faab', commit_date=1682603301.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0937b4ab48136eb161ead4abd4806d0708b1bb4c', commit_date=1607961058.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0c65bbfe8ce816a181780d2a249c94dd653e115a', commit_date=1642433763.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0c74b8b7d5cdb60dc3a3240cdb36af40b9f40288', commit_date=1615733031.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0de3b0d1eaacee9f7b15cabc05752cba945c7621', commit_date=1644500459.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1045d16ec13b1cab7878e7555538573d1884aad3', commit_date=1614793397.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='119b837a43d939ec02cf2aeba5bd203f8ebab4c7', commit_date=1649335379.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1415a2890b0451d80feef2d81e921a15d2b9d680', commit_date=1685431571.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='15cb8695a27eb8d4dc281ac3c937e12db8b5a6c1', commit_date=1604221237.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle the multiple packages error\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Create setup.cfg to explicitly specify packages\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\npackage_dir =\n = .\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='16625450b58f555dc3955d223f0c3b64a5686984', commit_date=1652277602.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='191f96908d6bbb46cf7293fb0ac1299f1e8b783d', commit_date=1719904631.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1a78993217b52745d63a3495a819efd7f1b0530a', commit_date=1691676945.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1bb0306a1309f9a57d8c652dec731a95cbd0052b', commit_date=1610422145.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to avoid package discovery issues\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1d1aadd0711b87d2a11c80aad15df6f8cf156712', commit_date=1642210241.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1db03ce68be362baa12330ae3f42b9673863fa52', commit_date=1626800410.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='210740408a732940430047fe9437c2193735573f', commit_date=1719586131.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='226da0d7c458816776549c2580abaa4782dc4c48', commit_date=1637400914.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='28831879f2b5a8f623623735480399735c1bb742', commit_date=1755578702.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='2e213c618841f3635885bab034606512c40a7fd4', commit_date=1646246849.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='320b4c61f97fec3facc3c4c2b4cf9351d3425b44', commit_date=1596283836.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='32c5d05cbd7551fd983a250945013239e0e5cb94', commit_date=1631705680.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='408f561b87f9955e92619cbf924d595a2655344f', commit_date=1678175921.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='432778464cbffc8ca675c1df786c31f8c23fc62c', commit_date=1642715056.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='45a817933ef51a24f0c5863c1026b4fe664b26fa', commit_date=1608647213.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to handle the multiple packages issue\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='4685cf624582cbc9a35d646f239347e54db798dc', commit_date=1652472968.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='49d26cb63fefe43c9b310136e4f2c172d8c433cb', commit_date=1599140563.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools==60.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and environment variables\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n\n # Install scikit-learn in development mode with specific build settings\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='4bc61a09eac44a86758c6a02a2b47f912a696d3b', commit_date=1719575535.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='4e44edebf9e811c718c2842b65db2eb41ba01786', commit_date=1723709827.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='52d93e141a5d874bd288f15cc1d8990f09721aad', commit_date=1754304060.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='55a65a2fa5653257225d7e184da3d0c00ff852b1', commit_date=1695213631.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='584d413fec25fb5c38f06c1fe88e652111395330', commit_date=1675930888.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='598045569c8f96fb345059f5316ea8903d374ff4', commit_date=1615476313.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5a332e77a10a44107276843d8532ef79f239c8f3', commit_date=1681854133.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5a850eb044ca07f1f3bcb1b284116d6f2d37df1b', commit_date=1657115862.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5b46d01f8d5015114644b91ce88ee4bc4fa5386d', commit_date=1680769691.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5c4e9a0fd82dd096bbdf78b69c264a741c768a86', commit_date=1690911539.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5ffec3233034e0413f548380d4a22f4e0eecae94', commit_date=1678722797.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='63a1a31a17f9bd9cdf617b2cf04bfaf2f32f0a17', commit_date=1639082235.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='64d54483edfa55ab44d836f9b08ff1bd38f7f6bb', commit_date=1627659978.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='6595229d116b128c5b36f204dc941f69e14abc7f', commit_date=1718288797.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='673f6259f3fb7bd2a057b1889e23b280fe638998', commit_date=1612389138.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='67ca4dda1d61c9ad95ed68b04cb40da2c822e960', commit_date=1678114713.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='687e84a126965b4179b02d86041a9e997eba87c9', commit_date=1751036214.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='691b00f4b7d169d38cc46cf14668a5029b2df8eb', commit_date=1728910531.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='6d7d0f275db08ca97e7ce9765e5e8f0604e490dd', commit_date=1641981733.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='6f91cbebe5c439d5712860315616b70cd2ca9f87', commit_date=1633437528.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='701537ecca85a333449814c82ac2b78db5f534a8', commit_date=1682379515.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='70ca21f106b603b611da73012c9ade7cd8e438b8', commit_date=1713791446.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='767e9ae7e4fec8bea36c0433ab42f500aacfde64', commit_date=1651223539.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='769da3d51feef52b97b8129bf4700cf088a247b2', commit_date=1613120619.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" wheel\n\n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='76c28285d3d3eb6a2834b7d1db01e296187c60b8', commit_date=1677233852.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7798fd829d0eb3637da17cc5cb359bf52efa551f', commit_date=1630429058.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7a2f5ca3a8478333f194a085b0c3635d75fcdf4d', commit_date=1678442780.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7aabe53e730947df0f6f1f85d640e6daea5bfc9f', commit_date=1634742992.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7af0a18996efb10fcbcdb15c7c132d2eb36be736', commit_date=1687508727.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7b715111bff01e836fcd3413851381c6a1057ca4', commit_date=1624465784.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7c835d550c1dcaf44938b1c285db017a773d7dba', commit_date=1662054353.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7f1d4d05064a160e19f786bfbac8996cf0ecac5d', commit_date=1707518612.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "Task(owner='scikit-learn', repo='scikit-learn', sha='80ebe21ec280892df98a02d8fdd61cbf3988ccd6', commit_date=1638310769.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='830864629e21509980a9c3904c9bb7bf2be8fec5', commit_date=1655213679.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8525ba5d3c3b5423a5599e654ce73b931882a434', commit_date=1754632277.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='863c552c448118249563f0e709ea83a1a9b2fc7f', commit_date=1612010007.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "Task(owner='scikit-learn', repo='scikit-learn', sha='871892cef9bc70224233fdf2140c896874c07b57', commit_date=1659000389.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='88c2db24bd3efb631372aa971270d6cb690d914d', commit_date=1726476355.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='897c0c570511be4b7912a335052ed479ac5ca1f3', commit_date=1705781316.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a71b840d3d7f6e5db9f9faf3b6c44f8ed6a3850', commit_date=1705345976.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a7684705f636a8dfcde8e2239d2e0bcd624ac54', commit_date=1647426404.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8ad7c3f02daae525ee83231fbd33fb65e8e05288', commit_date=1633621378.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8eef0e767c4bdd2fdb83f51b162afa32386d5973', commit_date=1692883694.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='9590c07128d3bad5978f08eeb34613d347b96e38', commit_date=1719499549.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='961afc72e0222cb108b77b68c145ea4424f089da', commit_date=1751880029.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='99410b1bdea296a0df48026aaee85472bf3cb7cf', commit_date=1625818419.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='99562100e941f0972a5a65484ff80f407eeb5137', commit_date=1674572593.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', sha='9c9c8582dff9f4563aa130ef89f155bad0051493', commit_date=1668796144.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='9e38cd00d032f777312e639477f1f52f3ea4b3b7', commit_date=1705585714.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a09a62eda27720a0cb949ea24b1e21d358f95176', commit_date=1676040745.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a47d569e670fd4102af37c3165c9b1ddf6fd3005', commit_date=1652372475.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', sha='a67ebbebc173007735e62eef7878c08435d28d89', commit_date=1718987804.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { + "Task(owner='scikit-learn', repo='scikit-learn', sha='a85b14d4799ba7c4e13e0e942e599f8077dc182e', commit_date=1679350355.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "Task(owner='scikit-learn', repo='scikit-learn', sha='a8b1905e8f977fcd4d6a348678bb1e82ed9b3310', commit_date=1606807943.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set 
ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='aa2131f9bdcfa7ff0dacfd6a47c207cbb68a49fa', commit_date=1751370298.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='abbeacc2daee2b213274924a5a4ffe6cbafb0627', commit_date=1651693256.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='aca8f20db461ca0dd70b02b6a1f41b957b2b12ee', commit_date=1665069106.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ad91259f20529306efe445f5a1da4dccc8c81b5a', commit_date=1663256210.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b1202af3b379e698539a2719f2b1e28706ce5388', commit_date=1638654791.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b3aea0053dadcb67adfc39a90c70ffca607a534f', commit_date=1643205359.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b5d55b4fd19ca97d68e4e34e5822865b0a8e90d2', commit_date=1651487470.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b609562c610822ad4b3c11a9e7a22710aba438af', commit_date=1637744681.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b6b6f63ebefe16403d11e8a0a2281b6e2a811933', commit_date=1678791874.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b8229daafee0e50690d4b8447f93cf1069ba6880', commit_date=1701274890.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='bc7cd3189bc817545791071515693445e1e271db', commit_date=1617352203.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='bf0886bae0ccbc8c5d285b6e2affe7e40474f970', commit_date=1619532370.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c0eb3d37244cc4bf35b82e18bff37320e198b038', commit_date=1670930060.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c217527af5744b9d0db8761c1e3667552312e5e7', commit_date=1652946509.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c300a8f2178fcae847f82ad548fe9452f2ba8bbb', commit_date=1658415495.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c47205fb7d45de50de4afa9760d974e754f103e1', commit_date=1707735651.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c63b21ec309f742defd56033eadfc8f7bf5b510b', commit_date=1711607317.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c6ad7361c8fc68188b83070aa0b6b797058c06fa', commit_date=1646214356.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c9138537790cc0fa352968eed927433fe17ee17c', commit_date=1701967415.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c9525d1600ecd526b9b98e275fc1b85782c25dea', commit_date=1634072165.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c9f9b041758c3fa5fdf74b15995a3e3607b0ad5a', commit_date=1737104589.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='caaa1f52a0632294bf951a9283d015f7b5dd5dd5', commit_date=1732650609.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", @@ -95,41 +1145,176 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN 
git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d5901462551283b689284e582152666faf0dc1da', commit_date=1676911719.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='d5df806c9715443f5dc7de9023a1b7aa2045eae4', commit_date=1677234005.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d85b1d3302a3ff45179a5826a747e8ee2562f143', commit_date=1674489554.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d8d5637cfe372dd353dfc9f79dbb63c3189a9ecc', commit_date=1644836117.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d92c76986ac6553ce8e0fe2c1bbaea500c105cc7', commit_date=1679480310.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dc1ea2751e8f4e18f61c7e6d767cf42c6e636256', commit_date=1608485758.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='dc580a8ef5ee2a8aea80498388690e2213118efd', commit_date=1670501069.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='de67a4420f1713058070802ad593cbcd2ee2d5f3', commit_date=1677582108.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='df692c03c1a6003878c6fc4d2f9f222d304dcee3', commit_date=1649449476.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dfaef0c6c3aef0d00c72573728c90c1d542e2957', commit_date=1657123469.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dfda968f1d0b3b1ecaeb4125d3e903416eaf18ec', commit_date=1678100532.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e113897235feaf309eaaed24001ca96f3608602f', commit_date=1648574496.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for potential warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e1db2a8173ca37e561cdfa4384481501c4d50868', commit_date=1644639631.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e1e8c66e05dd638ae785855bfb637e0180aea99c', commit_date=1642748755.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='e411c29625e66f7e440f1acce4069e01201cf122', commit_date=1672782103.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e64714637d8cc9f4724ae21ea500e4bdc57b0a39', commit_date=1629207428.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e7ae63f784c5f85af41cf8f346d194775f01f333', commit_date=1694440694.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='eae3f294d3ba8ae636730537faef4cdd612083ff', commit_date=1678119642.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eb2920766d7b2ffb04359a1dc8b6c611960931b7', commit_date=1725568507.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eb85684feb0505694e66365ba9f4d10a409f8f0b', commit_date=1697017427.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ee524f455dbf0285f7b121a08f1e9613a518abcf', commit_date=1617906457.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eecde00c7a706546271ff40d7d492b5f27046d2b', commit_date=1619516333.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='ef200eb16813f4e579f3a4e6cd4603e16f72f5a8', commit_date=1680030341.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ef82b778ecaeee11d6bfd005f59e882410d330b6', commit_date=1751882162.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f1d3417b086550be670cbfbb5b3c1760ac99203f', commit_date=1646068982.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f4ed8ef5e4498c9de2ff4b713c1695d6f312ffba', commit_date=1733748660.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f812e2a27619650463cb12d765f1b443b47c0828', commit_date=1628181136.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='f86f41d80bff882689fc16bd7da1fef4a805b464', commit_date=1695653805.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f965fcc0634e47b7230e120850cf7bb4efeb96e7', commit_date=1674829022.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='f96ce586eecb361d53b192ea3b44098d1bd49a77', commit_date=1637843007.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='fba028b07ed2b4e52dd3719dad0d990837bde28c', commit_date=1733159260.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='fe08016877e8bd715816cf9fbfb1fb697c3446d2', commit_date=1754300286.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='fee76cc5405c01e283a3b079dcb865f3017d5007', commit_date=1705008338.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ff9344f3d8d11d38fa3a2497199113e5bac9537c', commit_date=1666642605.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha=None, commit_date=0.0)": { "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index a492418..77147bd 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -18,7 +18,7 @@ from datasmith.scrape.utils import _parse_commit_url logger = configure_logging() -# logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w")) +# logger = configure_logging(level=10, stream=open(Path(__file__).with_suffix(".log"), "w")) def parse_args() -> argparse.Namespace: diff --git a/src/datasmith/agents/config.py b/src/datasmith/agents/config.py index 7c703f5..365412f 100644 --- a/src/datasmith/agents/config.py +++ b/src/datasmith/agents/config.py @@ -2,6 +2,7 @@ import os import dspy +from portkey_ai import PORTKEY_GATEWAY_URL logging.basicConfig(level=logging.INFO) logger = 
logging.getLogger(__name__) @@ -10,7 +11,14 @@ def configure_agent_backends() -> None: model = os.getenv("DSPY_MODEL_NAME") backend_url = os.getenv("DSPY_URL") - if anthropic_api_key := os.getenv("ANTHROPIC_API_KEY"): + kwargs: dict[str, str | dict[str, str]] = {"model_type": "chat"} + if portkey_api_key := os.getenv("PORTKEY_API_KEY"): + api_key = "unused-by-portkey" + model = os.getenv("PORTKEY_MODEL_NAME", "@anthropic/claude-3-5-sonnet-latest") + backend_url = PORTKEY_GATEWAY_URL + kwargs["headers"] = {"x-portkey-api-key": portkey_api_key} + kwargs["custom_llm_provider"] = "openai" + elif anthropic_api_key := os.getenv("ANTHROPIC_API_KEY"): api_key = anthropic_api_key model = os.getenv("ANTHROPIC_MODEL_NAME", "anthropic/claude-3-opus-20240229") backend_url = None @@ -24,5 +32,5 @@ def configure_agent_backends() -> None: logger.warning("Environment variables for DSPY model or API key are not set.") return - lm = dspy.LM(model=model, api_base=backend_url, api_key=api_key, model_type="chat") + lm = dspy.LM(model=model, api_base=backend_url, api_key=api_key, **kwargs) # pyright: ignore[reportArgumentType] dspy.configure(lm=lm) diff --git a/uv.lock b/uv.lock index 3be8a94..d8eb54c 100644 --- a/uv.lock +++ b/uv.lock @@ -326,6 +326,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382, upload-time = "2025-08-01T21:27:07.844Z" }, ] +[[package]] +name = "cached-property" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/4b/3d870836119dbe9a5e3c9a61af8cc1a8b69d75aea564572e385882d5aefb/cached_property-2.0.1.tar.gz", hash = "sha256:484d617105e3ee0e4f1f58725e72a8ef9e93deee462222dbd51cd91230897641", size = 10574, upload-time = "2024-10-25T15:43:55.667Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/11/0e/7d8225aab3bc1a0f5811f8e1b557aa034ac04bdf641925b30d3caf586b28/cached_property-2.0.1-py3-none-any.whl", hash = "sha256:f617d70ab1100b7bcf6e42228f9ddcb78c676ffa167278d9f730d1c2fba69ccb", size = 7428, upload-time = "2024-10-25T15:43:54.711Z" }, +] + [[package]] name = "cachetools" version = "6.1.0" @@ -958,6 +967,7 @@ dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pandas" }, + { name = "portkey-ai" }, { name = "requests" }, { name = "ruptures" }, { name = "simple-useragent" }, @@ -999,6 +1009,7 @@ requires-dist = [ { name = "gitpython" }, { name = "numpy" }, { name = "pandas" }, + { name = "portkey-ai", specifier = ">=1.14.3" }, { name = "requests" }, { name = "ruptures" }, { name = "simple-useragent" }, @@ -3248,6 +3259,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "portkey-ai" +version = "1.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "cached-property" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "types-requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/e2/96c00bacad299e4a117b1e13d35641eca8b3719dd1748dbb663b40061bb0/portkey_ai-1.14.3.tar.gz", hash = "sha256:4ab119aab5f1956bec31832c8ed09752ad8d5fe19ba32bbff08465456bc1e35d", size = 438978, upload-time = "2025-07-27T16:13:22.096Z" 
} +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/c0/1c3bb9aff997153a29b568af798f08d373220c8cb6d947f4c320f2165e73/portkey_ai-1.14.3-py3-none-any.whl", hash = "sha256:11b5d60f458f1bf5c50988171e4b88560222a70d5830490e7e847159b4bcdadf", size = 873441, upload-time = "2025-07-27T16:13:20.715Z" }, +] + [[package]] name = "pre-commit" version = "4.3.0" From 428658e3fb0ab91606093fab3ef8db0c78b3e76f Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Thu, 28 Aug 2025 22:41:43 +0000 Subject: [PATCH 13/20] Better backup sync --- Makefile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index a3f31e7..27afc37 100644 --- a/Makefile +++ b/Makefile @@ -6,20 +6,20 @@ install: ## Install the virtual environment and install the pre-commit hooks .PHONY: backup backup: ## Create a backup of the datasets, results, and analysis directories - @echo "Creating backup archive" + @echo "Syncing backup mirror with rsync" @/usr/bin/env bash -euo pipefail -c '\ if [ ! -f tokens.env ]; then \ echo "❌ Error: tokens.env file not found"; exit 1; \ fi; \ - BACKUP_DIR=$$(grep -E "^BACKUP_DIR=" tokens.env | head -n1 | cut -d "=" -f2-); \ + BACKUP_DIR=$$(awk -F= '"'"'/^BACKUP_DIR=/{print $$2; exit}'"'"' tokens.env); \ if [ -z "$$BACKUP_DIR" ]; then \ echo "❌ Error: BACKUP_DIR not defined in tokens.env"; exit 1; \ fi; \ - mkdir -p "$$BACKUP_DIR"; \ - zip -qr "$$BACKUP_DIR/datasmith.bckp" scratch/artifacts/benchmark_results scratch/artifacts/raw; \ - cp -f scratch/artifacts/cache.db "$$BACKUP_DIR/datasmith.cache.bckp"; \ + DEST="$$BACKUP_DIR/datasmith.mirror"; \ + mkdir -p "$$DEST"; \ + rsync -a --delete --human-readable --info=stats1 \ + scratch/ "$$DEST/scratch/"; \ ' - .PHONY: check check: ## Run code quality tools. 
@echo "Checking lock file consistency with 'pyproject.toml'" From 03d8266a0c58954088753b0f2d5d3080e74c65ba Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Thu, 28 Aug 2025 22:45:49 +0000 Subject: [PATCH 14/20] update benchmarking --- .gitignore | 4 + README.md | 24 + scratch/context_registry.json | 631 +++++++++++++++++- scratch/scripts/benchmark_commits.py | 168 +++-- .../scripts/initialize_context_registry.py | 1 - .../scripts/parallel_validate_containers.py | 50 +- scratch/scripts/synthesize_contexts.py | 36 +- scratch/scripts/update_context_registry.py | 21 + scratch/scripts/validate_containers.py | 7 +- src/datasmith/agents/config.py | 5 +- src/datasmith/agents/context_synthesis.py | 28 +- src/datasmith/docker/context.py | 214 +++--- src/datasmith/docker/entrypoint.sh | 15 +- src/datasmith/docker/orchestrator.py | 41 +- src/datasmith/docker/validation.py | 4 +- uv.lock | 83 ++- 16 files changed, 1102 insertions(+), 230 deletions(-) rename src/datasmith/docker/context_registry.py => scratch/scripts/initialize_context_registry.py (99%) create mode 100644 scratch/scripts/update_context_registry.py diff --git a/.gitignore b/.gitignore index c248a46..b9fc3ca 100644 --- a/.gitignore +++ b/.gitignore @@ -196,3 +196,7 @@ cython_debug/ tokens.env benchmark_results/ scratch/artifacts/ +output + +# Database files +*.db diff --git a/README.md b/README.md index 9c736dd..efb3b45 100644 --- a/README.md +++ b/README.md @@ -170,6 +170,29 @@ $ python scratch/scripts/filter_commits.py \ --max-repos 350 \ --threads 8 \ --procs 8 + +# Build contexts for all commits. Each context is a (repo, commit) pair with an associated build_env.sh script to install dependencies. Some reasons a context might fail to build (and get filtered out): +# 1. Commit couldn't be checked out +# 2. Commit didn't have an asv.conf.json file +# 3. We could not build the asv environment for the commit. +# 4. We could not run a quick asv run to ensure that the benchmarks run. 
+$ python scratch/scripts/synthesize_contexts.py \ + --commits scratch/artifacts/raw/commits_filtered.jsonl \ + --output-dir scratch/artifacts/results_synthesis_oth/ \ + --context-registry scratch/context_registry_updated.json \ + --max-workers 32 \ + --limit-per-repo -1 \ + --max-attempts 5 + +# This should create a file called scratch/context_registry.json with all the contexts + build.sh scripts to build those contexts. + +# Verify that the contexts can be built and the benchmarks can be run. +$ python scratch/scripts/parallel_validate_containers.py \ + --commits scratch/artifacts/raw/commits_filtered.jsonl \ + --output-dir scratch/artifacts/results_verification/ \ + --context-registry scratch/context_registry.json \ + --max-workers 32 \ + --limit-per-repo -1 ``` ### 5. Benchmark all commits @@ -189,6 +212,7 @@ The `dependency_recommendations.json` file is a dictionary that contains recomme # in userspace: $ python scratch/scripts/benchmark_commits.py \ --filtered-commits scratch/artifacts/raw/commits_filtered_sm.jsonl \ + --context-registry scratch/context_registry.json \ --max-concurrency 30 \ --num-cores 2 \ --asv-args "--interleave-rounds --append-samples -a rounds=2 -a repeat=2" \ diff --git a/scratch/context_registry.json b/scratch/context_registry.json index 9ee14e6..51e1752 100644 --- a/scratch/context_registry.json +++ b/scratch/context_registry.json @@ -1,15 +1,265 @@ { "contexts": { + "Task(owner='apache', repo='arrow', sha='3d6d5817313920abc71c854828d95b63b2562938', commit_date=1726645863.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='apache', repo='arrow', sha='77f099fb5c324afc8ee38cda4976bf20a08e7a4a', commit_date=1668536482.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find 
. -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to wheel if it fails\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='arviz-devs', repo='arviz', sha='904129035bb29d1316833cf6f5f1b5ccf69973e3', commit_date=1577571349.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='arviz-devs', repo='arviz', sha='d58fd616bdbf2f269ca66d293428f14b97064946', commit_date=1569629064.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='asdf-format', repo='asdf', sha='8d342d36794f92db7b14a7a6f1415ff5d65fed9e', commit_date=1701819981.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 
\\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='asdf-format', repo='asdf', sha='8e7fe6cab33649cb55fd5cdcac6cca77d9e9453c', commit_date=1698664980.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, "Task(owner='astropy', repo='astropy', sha=None, commit_date=0.0)": { "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\nROOT_PATH=${PWD}\ngit clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install -e . scipy matplotlib\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='bjodah', repo='chempy', sha='10bdaa5a1d128959ec10128246d977fd137c9671', commit_date=1444135786.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" python setup.py sdist bdist_wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bjodah', repo='chempy', sha='f61bd0bc1083a4fa90c736d74d591c9eef51f80c', commit_date=1535629364.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='bloomberg', repo='memray', sha='51aa84e51179d80758b3bbd7dce097b2b2e4fd19', commit_date=1701719904.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pkgconfig\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge libunwind\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bloomberg', repo='memray', sha='926624f40e4f71bb71c8e22106d7979cb06bb29a', commit_date=1673995384.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='calebbell', repo='thermo', sha='436a9ccd0c73c55df4d4a8f7383493f540a6b13f', commit_date=1641864678.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='calebbell', repo='thermo', sha='71259b242aadd45a5e1d2249e29019a2e856ac04', commit_date=1643426520.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='danielgtaylor', repo='python-betterproto', sha='c82816b8be4d6f240cde4e5f28234e5ee3b26920', commit_date=1697423550.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable fails due to BackendUnavailable error\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='danielgtaylor', repo='python-betterproto', sha='ca6b9fe1a2ccf7e8a9b02085a56de905e89eea69', commit_date=1697455035.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local 
match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable install fails due to BackendUnavailable error\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='datalad', repo='datalad', sha='83447c2944e4ed89e0a82ff2a3ea9b74221e8990', commit_date=1606433958.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='datalad', repo='datalad', sha='a9f423a8da0d144c88a74893449b6cb88cee3588', commit_date=1637870957.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='dedupeio', repo='dedupe', sha='7d2c79becabe375980613ff3bf66da678cbad658', commit_date=1719492316.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dedupeio', repo='dedupe', sha='9d527acc20f565f6859e9ee6f4a4903c0629a29f', commit_date=1673926972.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='deepchecks', repo='deepchecks', sha='9a5dd7dc90640d987d6ecf03b8bd9a1ea86199cb', commit_date=1658146693.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Fix the invalid version issue by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='deepchecks', repo='deepchecks', sha='e836e79da9cc0ac9e99ae3d4bfdd2982cd299080', commit_date=1661253434.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the invalid version error by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='default', repo='default', sha=None, commit_date=0.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone\n", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='devitocodes', repo='devito', sha='ccfb8230f2e5030e4a7b3548334e2d03757841f6', commit_date=1708609467.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='devitocodes', repo='devito', sha='e37d6ffc9edf5b0acc2e0b68c1853052c2959fda', commit_date=1719409850.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dipy', repo='dipy', sha='26ad85ff190ad0145f73fc87354cb12f2792a475', commit_date=1712766187.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dipy', repo='dipy', sha='984a2bbff98c7090a222fde52c3b7f6b0b3a189e', commit_date=1751068916.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='django-components', repo='django-components', sha='2472c2ad338a23fba015d4d9816cb62d1325455f', commit_date=1742720064.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='django-components', repo='django-components', sha='e0b718c31495a400d6e8712ed931ce4ab253e673', commit_date=1745142786.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dottxt-ai', repo='outlines', sha='1e8022e210dc7eb193d8e5808a617b1a9dc15644', commit_date=1752229063.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dottxt-ai', repo='outlines', sha='e9485cf2126d9c14bd749d55f8aa6729d96808d0', commit_date=1732739305.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='geopandas', repo='geopandas', sha='7d50380229eb84375546c2dc586de659096a6e61', commit_date=1531683944.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='geopandas', repo='geopandas', sha='c07ae3c50b6aa20e745b3693321c469e0d828a1c', commit_date=1611525697.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='h5py', repo='h5py', sha='1487a54fb5149603dcc32604df4db418ea4f5236', commit_date=1663429492.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='h5py', repo='h5py', sha='a8e82bcd63de14daddbc84c250a36c0ee8c850f6', commit_date=1602327474.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='holoviz', repo='datashader', sha='00220d8d24a4ada0ac8d30b6875004af5b03fdc4', commit_date=1738081225.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='holoviz', repo='datashader', sha='d9403a963e10e57cbf6c00c64c2998e9931097c0', commit_date=1736788153.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install hatchling\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='inducer', repo='loopy', sha='628b37187bec02ecd863662a96d024fbea5e89bf', commit_date=1623653651.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='inducer', repo='loopy', sha='b5da71bb9abf90848e0f196eedbd564d4fc477d2', commit_date=1623736465.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='ipython', repo='ipyparallel', sha='127b48f8bfeb3576c27e734a5414599fbbd4037e', commit_date=1679989417.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='ipython', repo='ipyparallel', sha='1cda27e603bf6e14866d085822afbf19b04d7574', commit_date=1681399422.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='kedro-org', repo='kedro', sha='507ebe4fbb660cd38e7ba5f9fbf89d35bfce29a4', commit_date=1746617473.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='kedro-org', repo='kedro', sha='b3a29d18f8ba2572a371f92b6f862148b77ffec6', commit_date=1744035416.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='lmfit', repo='lmfit-py', sha='9f9af6f36c0928767ea8b004ea8cb5a16aba6b04', commit_date=1634240070.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='lmfit', repo='lmfit-py', sha='f3dfdd8607aca6aceae29fb3fd57e03fd308a472', commit_date=1547606940.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='makepath', repo='xarray-spatial', sha='4df552cb70ae2f6f07b4325bcbf6a1b2afdb6718', commit_date=1643710398.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pyct\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN 
/workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr 
${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='makepath', repo='xarray-spatial', sha='59984d859820e6e1cd9f11f1bf7696c04d1924fb', commit_date=1646634548.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install param pyct 
# Ensure 'param' and 'pyct' are installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arctic', sha='91c2d269d7ad48db23799b3d21cb191880286806', commit_date=1519908330.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arctic', sha='d33d24bb8d6d6625351b316ce55b74ef8c957744', commit_date=1521040101.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arcticdb', sha='97493e6cf3b46f52204ce5ef436f1e828f6b0bb3', commit_date=1728297449.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arcticdb', sha='dd4617e309c5b31cebe79816ea43bf1136b59365', commit_date=1722514119.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation .\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mars-project', repo='mars', sha='a4645734e87bd01320ecf28191f6954dd034cbf4', commit_date=1654482585.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mars-project', repo='mars', sha='acecc9c6bdb7fbd45003e4a37424c42a4cec8ac2', commit_date=1652428417.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='mdanalysis', repo='mdanalysis', sha='0030b3864eb77a90a9442904e7d64d1619c6add5', commit_date=1607478583.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && 
\\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", @@ -61,7 +311,7 @@ "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, "Task(owner='mdanalysis', repo='mdanalysis', sha='264d6f9357a978444baa1f99411a03453664ab2b', commit_date=1672867721.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n \n # Install build dependencies first\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install \"numpy<2.0.0\" cython\n \n # Install package requirements before build\n if [ -f \"package/requirements.txt\" ]; then\n # Create temporary requirements file with numpy constraint\n sed 's/numpy>=2.0.0rc1/numpy<2.0.0/g' package/requirements.txt > temp_requirements.txt\n micromamba run -n \"asv_${version}\" pip install -r temp_requirements.txt\n rm temp_requirements.txt\n fi\n \n # Build and install package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n 
else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose 
--no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, @@ -81,7 +331,7 @@ "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local 
match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, "Task(owner='mdanalysis', repo='mdanalysis', sha='392c8ae5391e20f5e496f7ac03dae08c44deca3b', commit_date=1646727863.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n 
local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest setuptools wheel build\n\n # Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n # First check if we're in the package directory, if not try to find it\n if [ ! 
-f \"setup.py\" ] && [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"setup.py\" ]; then\n # Install in editable mode with required build dependencies\n micromamba run -n \"asv_${version}\" pip install -e . --no-deps --no-build-isolation\n else\n echo \"Error: setup.py not found in expected locations\"\n exit 1\n fi\ndone", + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n 
micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Check for pyproject.toml or setup.py and install accordingly\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, @@ -450,6 +700,66 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake 
ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='modin-project', repo='modin', sha='be3e716107a185961fc209c343b0feefe0fb9751', commit_date=1684841207.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='modin-project', repo='modin', sha='c5aac3ef99d14305ea9a130e14155fc37495e199', commit_date=1608304159.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='napari', repo='napari', sha='3b6800763f97452ccf8230abf5a65fd6beedd247', commit_date=1606539287.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='napari', repo='napari', sha='dfeefb43af6538dd1e5ad7820128dfc844dc54b1', commit_date=1723973799.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='networkx', repo='networkx', sha='1071e14b81baaa4f0becc1849e85839ae8c671d9', commit_date=1716269137.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='networkx', repo='networkx', sha='81df24ce59b5b4fddfa65cd0a57db96748bba904', commit_date=1745208237.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='newton-physics', repo='newton', sha='5b18850fd8243e4c707b596880c01c1966e5168e', commit_date=1753825967.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='newton-physics', repo='newton', sha='cd07ab2c989df6392253a77e82333ec57a433e94', commit_date=1751556054.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nilearn', repo='nilearn', sha='6c1a76e37cf1c0dd6b800271cb3994f3efd38d07', commit_date=1744125996.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nilearn', repo='nilearn', sha='73fe9520ea705056f89b1cd5982947de13d515a0', commit_date=1754650581.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='numpy', repo='numpy', sha='4092a9e160cc247a4a45724579a0c829733688ca', commit_date=1459109632.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='numpy', repo='numpy', sha='9c3f0bb9955d530d43487f2ab800c765c83a3ea7', commit_date=1716460609.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='nvidia', repo='warp', sha='5495dc762dae2f09b648588d0f979e03ea3ef88b', commit_date=1741386626.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --verbose --editable ${ROOT_PATH}\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", @@ -481,7 +791,7 @@ "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, "Task(owner='nvidia', repo='warp', sha='dc693d89d5b85ac7e72c7f4e226eb58a5d54131f', commit_date=1751384285.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with CUDA support\n micromamba create -y -n \"asv_${version}\" -c conda-forge -c nvidia \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \\\n pytest ninja cmake cuda-toolkit=11.8 cuda-nvcc=11.8 cuda-libraries=11.8\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n\n # Set up CUDA environment\n export CUDA_PATH=/opt/conda/envs/asv_${version}\n export PATH=$CUDA_PATH/bin:$PATH\n export LD_LIBRARY_PATH=$CUDA_PATH/lib64:$LD_LIBRARY_PATH\n\n # Create necessary symlinks for CUDA headers\n mkdir -p /usr/local/cuda/include\n if [ -d \"$CUDA_PATH/include\" ]; then\n ln -sf $CUDA_PATH/include/* /usr/local/cuda/include/\n fi\n\n # Build and install WARP\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local 
match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Ensure the necessary libraries are built before attempting to install the package\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, @@ -490,6 +800,126 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR 
/workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='optuna', repo='optuna', sha='445048a74c9090e60a82a49605044cc42727642a', commit_date=1650874136.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='optuna', repo='optuna', sha='c634449ebbd2160ee44a1845d1efd6c20ee200ae', commit_date=1714538588.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pandas-dev', repo='pandas', sha='2f4c93e8322775a0bb06429a02429b95ba6abb26', commit_date=1698253642.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pandas-dev', repo='pandas', sha='94a8af55b703fbaea19da9902a9790c7b93dc0ad', commit_date=1686591905.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='posthog', repo='posthog', sha='16075ff5c3671587db9e6a6a3ed396058d0f413b', commit_date=1733419912.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the pyproject.toml issue by ensuring the 'version' field is present\n if ! 
grep -q \"version\" \"${ROOT_PATH}/pyproject.toml\"; then\n echo \"version = '0.1.0'\" >> \"${ROOT_PATH}/pyproject.toml\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='posthog', repo='posthog', sha='3578a0c1c2b6f4425dc0fddf31d3d256bbf3fc87', commit_date=1655908403.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pvlib', repo='pvlib-python', sha='3692427bef155a32eac525fe965ed8d407a7846e', commit_date=1660774705.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pvlib', repo='pvlib-python', sha='b8c56c5e725ed12f15342c5336f71d52ec8008ce', commit_date=1749300951.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pybamm-team', repo='pybamm', sha='b1fc5950f0d8e5c8e104e00573fdff5561818014', commit_date=1723152711.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='pybamm-team', repo='pybamm', sha='e1f52ffcf9811bb7d5046af47c48a2291bfd50b8', commit_date=1653925577.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --constraint 
\"<3.10,>=3.7\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='bottleneck', sha='c5356daccdab4afc293f56d4b4ff47c154be5bcd', commit_date=1716493787.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='bottleneck', sha='dc01fad42713181b1f2bb13a965eb0651d1308b6', commit_date=1729241092.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --ignore-requires-python\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git 
asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='xarray', sha='4cbb7cbd86af1ccfe2b3b98f0e36a410f86d77ef', commit_date=1523669869.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='xarray', sha='dd6222f01a476caa96630e26d5b02fad6777a886', commit_date=1747916222.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pydicom', sha='50cd981a068c74b01d854c6cac9bb897fe0b74a9', commit_date=1726970247.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pydicom', sha='87266d96add6a6cccaa3032bbc96b0e3009c6dea', commit_date=1690047796.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c 
conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pynetdicom', sha='1b701e898b489d561884d20ad78920607a6d1df0', commit_date=1563786471.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pynetdicom', sha='bb1f9d164d5c408fc28e02f924b3821b92cb45ad', commit_date=1555925288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pymc-devs', repo='pymc', sha='6360b005fc610d0505f84885743215a3e09f046e', commit_date=1614035911.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pymc-devs', repo='pymc', sha='a06081e1e9649bd56e3528cb96380efdf6bb2dc0', commit_date=1710322397.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pysal', repo='momepy', sha='6467ae26e8bfca9ba91e7795ab7899aaf89c576c', commit_date=1604013921.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pysal', repo='momepy', sha='7619f2f760d9027434369114a49150e3d3a483fb', commit_date=1603224289.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-adaptive', repo='adaptive', sha='50fae4341c53439f57fcea63346ba3581bd187d4', commit_date=1665457361.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-adaptive', repo='adaptive', sha='a9bb7f612717000dd2cf6899d8ebbf479807f6f5', commit_date=1550239213.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='python-control', repo='python-control', sha='0422c82a80b3ec0dc7fcbc69562f99e35358ee80', commit_date=1680293750.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", @@ -545,6 +975,71 @@ "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git 
clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" }, + "Task(owner='python-hyper', repo='h11', sha='80805f06e5859692a9dcc32484b2745b7f215a8a', commit_date=1597311658.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-hyper', repo='h11', sha='d64468627a4adeb4140e1480a836c85ba903a2c6', commit_date=1522821575.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pytroll', repo='satpy', sha='94fc4f7749bc2a27f76c7a16a7289037d41120f2', commit_date=1644305622.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pytroll', repo='satpy', sha='aa7f0dd616a973eb2de0e5b77a9ec51d08cc601c', commit_date=1659722497.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pywavelets', repo='pywt', sha='21a30d2af5aca2b3c5f827aa407cb549e2c99fb9', commit_date=1551150162.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --use-pep517\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pywavelets', repo='pywt', sha='74b44217a66199fa2e0f8e036955fc00f5cbc21a', commit_date=1708613848.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='qiskit', repo='qiskit', sha='023cbd4ec646fc81e0434b6de434bb477ad94979', commit_date=1755506488.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='qiskit', repo='qiskit', sha='b12e9ec3cff020983e3dde9b16f5ccc4fd0f4963', commit_date=1715792171.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython setuptools-rust\n micromamba run -n \"asv_${version}\" pip install rustup\n micromamba run -n \"asv_${version}\" rustup toolchain install stable\n micromamba run -n \"asv_${version}\" rustup default stable\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='quantumlib', repo='cirq', sha='01ae51eebf3b18a5cbee9fc0c697d4e1511c07f2', commit_date=1640302944.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='quantumlib', repo='cirq', sha='1a75d9faee3b78765bb4badcf73e3d3e72a3ca2a', commit_date=1744652301.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='rockhopper-technologies', repo='enlighten', sha='d239fa5496a6c342b85343d53a4c16d8db9a87a5', commit_date=1698502059.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='scikit-image', repo='scikit-image', sha='0ff35b21293405e9922e44b9dda3818db960b87e', commit_date=1674543103.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-image', repo='scikit-image', sha='c7479c1d7430020a9ee9d92f25a1f0c33e36a7c1', commit_date=1597584715.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, "Task(owner='scikit-learn', repo='scikit-learn', sha='044f1b00a62c9083ce3212a3e69046c9afac0de6', commit_date=1662470783.0)": { "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", @@ -1319,6 +1814,136 @@ "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='57086e91b65b88a95c89449aa501ff68a61dc39a', commit_date=1563459886.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='faa240fd7469176036a91430ae6a0a45e627c94a', commit_date=1531145592.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scipy', repo='scipy', sha='83dbd97a76af8621dd0228a797f5207bed094c23', commit_date=1679643125.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran pybind11\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge openblas\n micromamba run -n \"asv_${version}\" git submodule update --init\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scipy', repo='scipy', sha='b919b4aa67a541b1fef91820a4e94156f7dd36d2', commit_date=1731196689.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install pybind11\n micromamba run -n \"asv_${version}\" pip install openblas\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='cartopy', sha='9a4d894d9adab3b3a8d9cee6299581ba0ef9ec20', commit_date=1662748176.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='cartopy', sha='d9825f18dc6a70b5b4ef6bc5bf48d8025eef1e8e', commit_date=1581379933.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --upgrade setuptools\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p 
$MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='iris', sha='33deead5846b37019902ba067c87e710e55ff6e6', commit_date=1650551816.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='iris', sha='b2ce2a34e2eef7e3d6203c77ada7ed4ce89e3145', commit_date=1573652360.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='anndata', sha='2712af6efcf2d4356f4185a10e92328168710d9f', commit_date=1680623010.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython flit-core setuptools_scm\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='anndata', sha='df213f659f0e9eadfcab4af48ee98de7145252a7', commit_date=1733842403.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='scanpy', sha='7f3f89ac02e924a3a6d55c31730cfaf23b0b4223', commit_date=1744636041.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='scanpy', sha='ad657edfb52e9957b9a93b3a16fc8a87852f3f09', commit_date=1718709475.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='shapely', repo='shapely', sha='3c3a83986ac5bf434e0ca6b7bd16571a1ddac0a4', commit_date=1696785164.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='shapely', repo='shapely', sha='ff2ceac81cca6240c459eba5a5ce07084fe25ad2', commit_date=1662401853.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sourmash-bio', repo='sourmash', sha='9230fce7479c547c96dabe0c1a749a71a4b9e77c', commit_date=1650894889.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='sourmash-bio', repo='sourmash', sha='d2d638b645048cc93377fb9aff8a3be8c937b8b3', commit_date=1613310154.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge rust\n micromamba run -n \"asv_${version}\" pip 
install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='spotify', repo='voyager', sha='49416c5db539a40adba2588bfe19dc8736db01b2', commit_date=1734118555.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. 
Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='spotify', repo='voyager', sha='88cfc468617fde8360ac6db7e71bc578ba49ed16', commit_date=1725990271.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"${ROOT_PATH}\"\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sunpy', repo='sunpy', sha='01ea7b5e2760c24e08386f95fd5fd1c0f73da47f', commit_date=1739035442.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sunpy', repo='sunpy', sha='770f95dbfb033ffacc7172a3cff5158b09f7efe4', commit_date=1651836877.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython extension-helpers\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv 
pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='textualize', repo='rich', sha='1de94713811101702b8fcf283c64d1a5de5a8213', commit_date=1657547667.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='textualize', repo='rich', sha='cb92947610614e04116f82cb001ed44dda1699fb', commit_date=1647342081.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='tqdm', repo='tqdm', sha='0f823e79f303b4a93ef1381badb1e65757e5070f', commit_date=1603641812.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='tqdm', repo='tqdm', sha='42761473f9edf276937cc3a28a6fcabc59f5f97d', commit_date=1575632008.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='xorbitsai', repo='xorbits', sha='aee883be1dcd4cbbd43d67794932d5c858fcffe2', commit_date=1676955703.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to non-editable if it fails\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='xorbitsai', repo='xorbits', sha='ebc391fe0fa55599c3197c52408bd43a4bd9476f', commit_date=1695401335.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml if setup.py is not found\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n else\n echo \"Neither 'setup.py' nor 'pyproject.toml' found in ${ROOT_PATH}. 
Cannot install the project.\"\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" } }, "version": 1 diff --git a/scratch/scripts/benchmark_commits.py b/scratch/scripts/benchmark_commits.py index 7273ab9..305f7db 100644 --- a/scratch/scripts/benchmark_commits.py +++ b/scratch/scripts/benchmark_commits.py @@ -2,23 +2,28 @@ import argparse import asyncio +import datetime import logging import math import os import pickle +import shutil from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path import asv import pandas as pd +from datasmith.benchmark.collection import BenchmarkCollection from datasmith.docker.context import ContextRegistry from datasmith.docker.orchestrator import ( build_repo_sha_image, get_docker_client, orchestrate, ) +from datasmith.docker.validation import BuildResult, Task from datasmith.logging_config import 
configure_logging +from datasmith.scrape.utils import _parse_commit_url # logger = configure_logging(level=logging.DEBUG, stream=open(Path(__file__).with_suffix(".log"), "w")) logger = configure_logging(level=logging.DEBUG) @@ -30,11 +35,16 @@ def parse_args() -> argparse.Namespace: formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( - "--filtered-commits", + "--commits", type=Path, required=True, help="Path to a jsonl containing a pandas dataframe with commit_ids, repo_name, and the relative asv_conf_location.", ) + parser.add_argument( + "--dashboard", + type=Path, + help="Path to the dashboard containing the benchmarks. Either --dashboard or --commits must be provided.", + ) parser.add_argument( "--max-concurrency", type=int, @@ -70,35 +80,70 @@ def parse_args() -> argparse.Namespace: action="store_true", help="Force rebuild the Docker images even if they already exist.", ) + parser.add_argument( + "--context-registry", + type=Path, + help="Path to the context registry JSON file.", + ) + parser.add_argument( + "--limit-per-repo", + type=int, + default=-1, + help="Cap SHAs per repo (keeps your small-scale test). 
-1 = no limit.", + ) return parser.parse_args() -def process_commits(commits_pth: Path) -> list[tuple[str, str, str]]: - commits = pd.read_json(commits_pth, lines=True) - all_states = {} - for _, row in commits.iterrows(): - repo_name = row["repo_name"] - sha = row["commit_sha"] - has_asv = row.get("has_asv", True) - if not has_asv: - logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") - continue - owner, repo = repo_name.split("/") - if (owner, repo) not in all_states: - all_states[(owner, repo)] = {(sha)} - else: - all_states[(owner, repo)].add(sha) - - all_states_list = [(owner, repo, sha) for (owner, repo), shas in all_states.items() for sha in shas] - - return all_states_list - - -def main() -> None: - args = parse_args() - - all_states = process_commits(args.filtered_commits) - context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[tuple[str, float]]]: + if args.dashboard: + dashboard = BenchmarkCollection.load(args.dashboard) + all_states = {} + for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): + owner = owner.lower() + repo = repo.lower() + sha = sha.lower() + if (owner, repo) not in all_states: + all_states[(owner, repo)] = {(sha, 0.0)} + else: + all_states[(owner, repo)].add((sha, 0.0)) + elif args.commits: + commits = pd.read_json(args.commits, lines=True) + all_states = {} + for _, row in commits.iterrows(): + repo_name = row["repo_name"] + sha = row["commit_sha"] + has_asv = row.get("has_asv", True) + if not has_asv: + logger.debug(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + continue + owner, repo = repo_name.split("/") + commit_date_unix: float = ( + 0.0 if row.get("date", None) is None else datetime.datetime.fromisoformat(row["date"]).timestamp() + ) + if (owner, repo) not in all_states: + all_states[(owner, repo)] = [(sha, 
commit_date_unix)] + else: + all_states[(owner, repo)].append((sha, commit_date_unix)) + else: + raise ValueError("Either --dashboard or --commits must be provided.") + return all_states + + +def main(args: argparse.Namespace) -> None: + client = get_docker_client() + all_states = process_inputs(args) + context_registry = ContextRegistry.load_from_file(path=args.context_registry) + + # Prepare tasks + tasks: list[Task] = [] + for (owner, repo), uniq in all_states.items(): + limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) + for sha, date in limited: + task = Task(owner, repo, sha, commit_date=date) + if task in context_registry: + tasks.append(task) + else: + logger.debug(f"main: skipping {task} as not in context registry") max_concurrency = ( args.max_concurrency if args.max_concurrency != -1 else max(4, math.floor(0.5 * (os.cpu_count() or 1))) @@ -111,30 +156,58 @@ def main() -> None: raise ValueError() n_cores = args.num_cores - output_dir = Path(args.output_dir).absolute() - - # Create the results and logs directories if they don't exist - Path(f"{output_dir}/results").mkdir(parents=True, exist_ok=True) - Path(f"{output_dir}/logs").mkdir(parents=True, exist_ok=True) - - client = get_docker_client() - - # Ensure all required Docker images are available - docker_image_names = [] - - with ThreadPoolExecutor(max_workers=args.num_cores * 4) as pool: - futures = [ - pool.submit(build_repo_sha_image, client, context_registry, owner, repo, sha, args.force_rebuild) - for owner, repo, sha in all_states - ] - for fut in as_completed(futures): - docker_image_names.append(fut.result()) + output_dir = args.output_dir.absolute() + # remove the folders first. 
+ shutil.rmtree(output_dir / "results", ignore_errors=True) + shutil.rmtree(output_dir / "logs", ignore_errors=True) + + (args.output_dir / "results").mkdir(parents=True, exist_ok=True) + (args.output_dir / "logs").mkdir(parents=True, exist_ok=True) + + machine_defaults: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] + machine_defaults = { + k: str(v.replace(" ", "_").replace("'", "").replace('"', "")) for k, v in machine_defaults.items() + } + logger.debug("main: machine_defaults keys=%d", len(machine_defaults)) + + builds: list[BuildResult] = [] + if args.max_concurrency < 1: + for t in tasks: + build_res: BuildResult = build_repo_sha_image( + client=client, + context_registry=context_registry, + task=t, + force=args.force_rebuild, + ) + builds.append(build_res) + else: + with ThreadPoolExecutor(max_workers=args.max_concurrency) as pool: + futures = [ + pool.submit( + build_repo_sha_image, + client, + context_registry, + task, + args.force_rebuild, + ) + for task in tasks + ] + for fut in as_completed(futures): + builds.append(fut.result()) + + successful_builds = [b for b in builds if b.rc != 1] + + logger.info("Running benchmarks for %d images", len(successful_builds)) + logger.info("Failed builds for %d images", len(builds) - len(successful_builds)) + for b in builds: + if b.rc == 1: + logger.warning("Build failed for %s", b.image_name) machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] machine_args["num_cpu"] = str(args.num_cores) files_by_image: dict[str, dict[str, str]] = asyncio.run( orchestrate( - docker_image_names=docker_image_names, + docker_image_names=[b.image_name for b in successful_builds], asv_args=asv_args, machine_args=machine_args, max_concurrency=max_concurrency, @@ -156,4 +229,5 @@ def main() -> None: if __name__ == "__main__": - main() + args = parse_args() + main(args) diff --git a/src/datasmith/docker/context_registry.py 
b/scratch/scripts/initialize_context_registry.py similarity index 99% rename from src/datasmith/docker/context_registry.py rename to scratch/scripts/initialize_context_registry.py index a2c1960..adbcef7 100644 --- a/src/datasmith/docker/context_registry.py +++ b/scratch/scripts/initialize_context_registry.py @@ -355,7 +355,6 @@ ), ) - # CONTEXT_REGISTRY.register( # "asv/default/nobuild", # DockerContext( diff --git a/scratch/scripts/parallel_validate_containers.py b/scratch/scripts/parallel_validate_containers.py index e325918..2eddd91 100644 --- a/scratch/scripts/parallel_validate_containers.py +++ b/scratch/scripts/parallel_validate_containers.py @@ -1,6 +1,7 @@ from __future__ import annotations import argparse +import datetime import json from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path @@ -53,18 +54,23 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--limit-per-repo", type=int, default=5, help="Cap SHAs per repo (keeps your small-scale test). -1 = no limit." 
) + parser.add_argument( + "--context-registry", + type=Path, + help="Path to the context registry JSON file.", + ) return parser.parse_args() -def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: +def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[tuple[str, float]]]: if args.dashboard: dashboard = BenchmarkCollection.load(args.dashboard) all_states = {} for owner, repo, sha in dashboard.enriched_breakpoints.url.apply(_parse_commit_url): if (owner, repo) not in all_states: - all_states[(owner, repo)] = {sha} + all_states[(owner, repo)] = {(sha, 0.0)} else: - all_states[(owner, repo)].add(sha) + all_states[(owner, repo)].add((sha, 0.0)) elif args.commits: commits = pd.read_json(args.commits, lines=True) all_states = {} @@ -73,13 +79,16 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: sha = row["commit_sha"] has_asv = row.get("has_asv", True) if not has_asv: - logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + logger.debug(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") continue owner, repo = repo_name.split("/") + commit_date_unix: float = ( + 0.0 if row.get("date", None) is None else datetime.datetime.fromisoformat(row["date"]).timestamp() + ) if (owner, repo) not in all_states: - all_states[(owner, repo)] = {(sha)} + all_states[(owner, repo)] = [(sha, commit_date_unix)] else: - all_states[(owner, repo)].add(sha) + all_states[(owner, repo)].append((sha, commit_date_unix)) else: raise ValueError("Either --dashboard or --commits must be provided.") return all_states @@ -89,14 +98,17 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[str]]: def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) - context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) - + context_registry = 
ContextRegistry.load_from_file(path=args.context_registry) # Prepare tasks tasks: list[Task] = [] for (owner, repo), uniq in all_states.items(): limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) - for sha in limited: - tasks.append(Task(owner, repo, sha)) + for sha, date in limited: + task = Task(owner, repo, sha, commit_date=float(date)) + if task in context_registry: + tasks.append(task) + else: + logger.debug(f"main: skipping {task} not in context registry") (args.output_dir / "results").mkdir(parents=True, exist_ok=True) # reset outputs @@ -111,13 +123,21 @@ def main(args: argparse.Namespace) -> None: logger.info("Starting parallel validation of %d tasks with %d workers", len(tasks), args.max_workers) results: list[dict] = [] - with ThreadPoolExecutor(max_workers=args.max_workers) as ex: - futures = [ex.submit(validate_one, t, args, client, context_registry, machine_defaults) for t in tasks] - for fut in as_completed(futures): - rec = fut.result() + if args.max_workers < 1: + for t in tasks: + rec = validate_one(t, args, client, context_registry, machine_defaults) results.append(rec) - with _err_lock, open(args.output_dir / "failures.jsonl", "a") as jf: + with _err_lock, open(args.output_dir / "logs.jsonl", "a") as jf: jf.write(json.dumps(rec) + "\n") + return + else: + with ThreadPoolExecutor(max_workers=args.max_workers) as ex: + futures = [ex.submit(validate_one, t, args, client, context_registry, machine_defaults) for t in tasks] + for fut in as_completed(futures): + rec = fut.result() + results.append(rec) + with _err_lock, open(args.output_dir / "logs.jsonl", "a") as jf: + jf.write(json.dumps(rec) + "\n") # Rollup (minimal, quick to read) rollup = { diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index 77147bd..b212e86 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -17,7 +17,7 @@ from datasmith.logging_config 
import configure_logging from datasmith.scrape.utils import _parse_commit_url -logger = configure_logging() +logger = configure_logging(level=10) # logger = configure_logging(level=10, stream=open(Path(__file__).with_suffix(".log"), "w")) @@ -56,6 +56,11 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--limit-per-repo", type=int, default=5, help="Cap SHAs per repo (keeps your small-scale test). -1 = no limit." ) + parser.add_argument( + "--context-registry", + type=Path, + help="Path to the context registry JSON file.", + ) return parser.parse_args() @@ -76,7 +81,7 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[tuple[ sha = row["commit_sha"] has_asv = row.get("has_asv", True) if not has_asv: - logger.warning(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") + logger.debug(f"Skipping {repo_name} commit {sha} as it does not have ASV benchmarks.") continue owner, repo = repo_name.split("/") commit_date_unix: float = ( @@ -91,17 +96,28 @@ def process_inputs(args: argparse.Namespace) -> dict[tuple[str, str], set[tuple[ return all_states +def prepare_tasks( + all_states: dict[tuple[str, str], set[tuple[str, float]]], limit_per_repo: int, context_registry: ContextRegistry +) -> list[Task]: + tasks: list[Task] = [] + for (owner, repo), uniq in all_states.items(): + limited = list(uniq)[: max(0, limit_per_repo)] if limit_per_repo > 0 else list(uniq) + for sha, date in limited: + task = Task(owner, repo, sha, commit_date=date) + if task not in context_registry: + tasks.append(task) + else: + logger.debug(f"prepare_tasks: skipping {task} as already in context registry") + return tasks + + def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) - context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) + context_registry = ContextRegistry.load_from_file(path=args.context_registry) # Prepare tasks - tasks: list[Task] = [] 
- for (owner, repo), uniq in all_states.items(): - limited = list(uniq)[: max(0, args.limit_per_repo)] if args.limit_per_repo > 0 else list(uniq) - for sha, date in limited: - tasks.append(Task(owner, repo, sha, commit_date=date)) + tasks = prepare_tasks(all_states, args.limit_per_repo, context_registry) (args.output_dir / "results").mkdir(parents=True, exist_ok=True) # reset outputs @@ -131,7 +147,7 @@ def main(args: argparse.Namespace) -> None: if int(res["rc"]) != 1: logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) - context_registry.save_to_file(path=Path("scratch/context_registry.json")) + context_registry.save_to_file(path=args.context_registry) else: with ThreadPoolExecutor(max_workers=args.max_workers) as ex: futures = [ @@ -154,7 +170,7 @@ def main(args: argparse.Namespace) -> None: if int(res["rc"]) != 1: logger.info("main: SUCCESS %s/%s@%s", res["owner"], res["repo"], res["sha"]) - context_registry.save_to_file(path=Path("scratch/context_registry.json")) + context_registry.save_to_file(path=args.context_registry) # Rollup (minimal, quick to read) rollup = { diff --git a/scratch/scripts/update_context_registry.py b/scratch/scripts/update_context_registry.py new file mode 100644 index 0000000..8597b25 --- /dev/null +++ b/scratch/scripts/update_context_registry.py @@ -0,0 +1,21 @@ +from copy import deepcopy +from pathlib import Path + +from datasmith.docker.context import ContextRegistry, DockerContext + + +def update_cr_entrypoint(cr: ContextRegistry): + new_reg = {} + for k, v in cr.registry.items(): + new_v = deepcopy(v) + new_v.entrypoint_data = DockerContext().entrypoint_data + new_reg[k] = new_v + + cr.registry = new_reg + return cr + + +if __name__ == "__main__": + cr = ContextRegistry.load_from_file(Path("scratch/context_registry.json")) + new_cr = update_cr_entrypoint(cr) + new_cr.save_to_file(Path("scratch/context_registry_updated.json")) diff --git a/scratch/scripts/validate_containers.py 
b/scratch/scripts/validate_containers.py index 9360494..43f0af3 100644 --- a/scratch/scripts/validate_containers.py +++ b/scratch/scripts/validate_containers.py @@ -47,6 +47,11 @@ def parse_args() -> argparse.Namespace: default=Path("output"), help="Directory where the results will be stored.", ) + parser.add_argument( + "--context-registry", + type=Path, + help="Path to the context registry JSON file.", + ) return parser.parse_args() @@ -83,7 +88,7 @@ def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) - context_registry = ContextRegistry.load_from_file(path=Path("scratch/context_registry.json")) + context_registry = ContextRegistry.load_from_file(path=args.context_registry) machine_args: dict[str, str] = asv.machine.Machine.get_defaults() # pyright: ignore[reportAttributeAccessIssue] all_files_by_image = {} diff --git a/src/datasmith/agents/config.py b/src/datasmith/agents/config.py index 365412f..4bed37d 100644 --- a/src/datasmith/agents/config.py +++ b/src/datasmith/agents/config.py @@ -16,7 +16,10 @@ def configure_agent_backends() -> None: api_key = "unused-by-portkey" model = os.getenv("PORTKEY_MODEL_NAME", "@anthropic/claude-3-5-sonnet-latest") backend_url = PORTKEY_GATEWAY_URL - kwargs["headers"] = {"x-portkey-api-key": portkey_api_key} + kwargs["headers"] = { + "x-portkey-api-key": portkey_api_key, + "x-portkey-provider": model.split("/")[0].lstrip("@"), + } kwargs["custom_llm_provider"] = "openai" elif anthropic_api_key := os.getenv("ANTHROPIC_API_KEY"): api_key = anthropic_api_key diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index b4a23b0..9c4dd3b 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -34,7 +34,8 @@ def _ts_to_iso(ts: float | int | None) -> str: class BuildScriptSynthesis(dspy.Signature): - """Draft a bash script (building_data) to build & install a Python repo inside micromamba 
envs + """ + Draft a bash script (docker_build.sh) to build & install a Python repo inside micromamba envs discovered via asv.*.json. The script MUST be idempotent and safe to run in Docker. Respect this template: - discover and cd into the dir containing asv.*.json @@ -43,6 +44,7 @@ class BuildScriptSynthesis(dspy.Signature): * ensure asv + build tooling * then perform project install (editable or wheel) with best-guess flags - no user prompts, all non-interactive + - Do not surround with ```bash ... ```. Return raw bash script. """ # Inputs @@ -58,11 +60,15 @@ class BuildScriptSynthesis(dspy.Signature): failure_more = dspy.InputField( desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." ) - last_building_data = dspy.InputField(desc="Previous building_data script; empty on attempt #1.") - expected_template = dspy.InputField(desc="Stable outer template; only BUILD STEPS may be customized.") + last_docker_build_script = dspy.InputField(desc="Previous docker_build.sh script.") + expected_template = dspy.InputField(desc="Stable outer template..") # Output - building_data = dspy.OutputField(desc="Final executable bash script with only the BUILD STEPS region customized.") + error_summary = dspy.OutputField(desc="A brief summary of the last build failure, and possible causes.") + resolution_steps = dspy.OutputField(desc="Concrete steps to resolve the failure.") + docker_build_script = dspy.OutputField( + desc="Final executable bash script that successfully builds the project from source." 
+ ) class BuildScriptProgram(dspy.Module): @@ -78,7 +84,7 @@ def forward( stderr_logs: str, stdout_logs: str, failure_more: str, - last_building_data: str, + last_docker_build_script: str, expected_template: str, ) -> str: logger.info( @@ -87,7 +93,7 @@ def forward( sha, len(stderr_logs or ""), len(stdout_logs or ""), - bool(last_building_data), + bool(last_docker_build_script), failure_more, ) out = self.predict( @@ -97,15 +103,17 @@ def forward( stderr_logs=stderr_logs or "", stdout_logs=stdout_logs or "", failure_more=failure_more or "N/A", - last_building_data=last_building_data or "", + last_docker_build_script=last_docker_build_script or "", expected_template=expected_template, ) # Safety belt: ensure the required fixed template anchors are present. - script = out.building_data.strip() # pyright: ignore[reportAttributeAccessIssue] + script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] logger.debug("DSPy: candidate script preview: %s", _preview(script, 240)) must_haves = ["cd_asv_json_dir()", "micromamba", "for version in $python_versions; do"] ok_template = all(m in script for m in must_haves) - if not ok_template: + must_not_haves = ["```", "import IPython", "from IPython", "exit(", "sys.exit("] + no_bad = all(m not in script for m in must_not_haves) + if (not ok_template) or (not no_bad): logger.warning("DSPy: template anchors missing; falling back to provided template") script = expected_template logger.info("DSPy: finalized script length=%d", len(script)) @@ -158,7 +166,7 @@ def synthesize_script( stderr_logs=stderr_tail or "", stdout_logs=stdout_tail or "", failure_more=failure_more or "N/A", - last_building_data=last_script or "", + last_docker_build_script=last_script or "", expected_template=building_template, ) script = str(script) diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index 0805c2c..95434a3 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ 
-1,5 +1,6 @@ from __future__ import annotations +import contextlib import datetime import io import json @@ -141,7 +142,8 @@ def build_container_streaming( # noqa: C901 build_args: dict[str, str], *, force: bool = False, - timeout_s: int = 20 * 60, + delete_img: bool = False, + timeout_s: float = float("inf"), tail_chars: int = 4000, pull: bool = False, ) -> BuildResult: @@ -151,118 +153,129 @@ def build_container_streaming( # noqa: C901 report immediately). """ t0 = time.time() - - # Fast path: respect existing image when not forcing try: - img = client.images.get(image_name) - if force: - logger.info("Force rebuild requested. Removing '%s'.", image_name) - client.images.remove(image=img.id, force=True) + # Fast path: respect existing image when not forcing + try: + img = client.images.get(image_name) + if force: + logger.info("Force rebuild requested. Removing '%s'.", image_name) + with contextlib.suppress(Exception): + client.images.remove(image=img.id, force=True) + else: + logger.info("Docker image '%s' found locally (skip build).", image_name) + return BuildResult( + ok=True, + image_name=image_name, + image_id=img.id, + rc=0, + duration_s=time.time() - t0, + stderr_tail="", + stdout_tail="", + ) + except ImageNotFound: + logger.info("Docker image '%s' not found locally. Building.", image_name) + + # Streamed build via low-level API for better control + tar_stream = self.build_tarball_stream() + stdout_buf: deque[str] = deque(maxlen=2000) # chunk-tail buffers + stderr_buf: deque[str] = deque(maxlen=2000) + + # Pretty log line for transparency + if build_args: + build_args_str = " --build-arg ".join(f"{k}={v}" for k, v in build_args.items()) + logger.info("$ docker build -t %s . 
--build-arg %s", image_name, build_args_str) else: - logger.info("Docker image '%s' found locally (skip build).", image_name) + logger.info("$ docker build -t %s .", image_name) + + try: + stream = client.api.build( + fileobj=tar_stream, + custom_context=True, + tag=image_name, + buildargs=build_args, + decode=True, + rm=True, + pull=pull, + ) + except DockerException: + logger.exception("Failed to initiate build for '%s'", image_name) return BuildResult( - ok=True, + ok=False, image_name=image_name, - image_id=img.id, - rc=0, + image_id=None, + rc=1, duration_s=time.time() - t0, stderr_tail="", stdout_tail="", ) - except ImageNotFound: - logger.info("Docker image '%s' not found locally. Building.", image_name) - # Streamed build via low-level API for better control - tar_stream = self.build_tarball_stream() - stdout_buf: deque[str] = deque(maxlen=2000) # chunk-tail buffers - stderr_buf: deque[str] = deque(maxlen=2000) - - # Pretty log line for transparency - if build_args: - build_args_str = " --build-arg ".join(f"{k}={v}" for k, v in build_args.items()) - logger.info("$ docker build -t %s . 
--build-arg %s", image_name, build_args_str) - else: - logger.info("$ docker build -t %s .", image_name) + error_seen = None + try: + for chunk in stream: + # Time check first + if time.time() - t0 > timeout_s: + error_seen = "[TIMEOUT]" + break + + # Typical keys: 'stream', 'status', 'error', 'errorDetail' + if chunk.get("stream"): + s = str(chunk["stream"]) + if s: + stdout_buf.append(s) + if "status" in chunk and chunk.get("progressDetail"): + # Status lines (pulling base layers, etc.)—treat as stdout + s = str(chunk.get("status", "")) + if s: + stdout_buf.append(s + "\n") + if "error" in chunk or "errorDetail" in chunk: + error_seen = (chunk.get("error") or str(chunk.get("errorDetail", ""))).strip() + if error_seen: + # also track in stderr tail + stderr_buf.append(error_seen + "\n") + break + except APIError: + logger.exception("Build stream APIError for '%s'", image_name) + error_seen = "APIError during build" + + duration = time.time() - t0 + + # Success path: ensure image exists + if not error_seen: + try: + img = client.images.get(image_name) + return BuildResult( + ok=True, + image_name=image_name, + image_id=img.id, + rc=0, + duration_s=duration, + stderr_tail="".join(stderr_buf)[-tail_chars:], + stdout_tail="".join(stdout_buf)[-tail_chars:], + ) + except ImageNotFound: + error_seen = "Build completed but image not found" - try: - stream = client.api.build( - fileobj=tar_stream, - custom_context=True, - tag=image_name, - buildargs=build_args, - decode=True, - rm=True, - pull=pull, - ) - except DockerException: - logger.exception("Failed to initiate build for '%s'", image_name) + # Failure + rc = 124 if error_seen == "[TIMEOUT]" else 1 return BuildResult( ok=False, image_name=image_name, image_id=None, - rc=1, - duration_s=time.time() - t0, - stderr_tail="", - stdout_tail="", + rc=rc, + duration_s=duration, + stderr_tail="".join(stderr_buf)[-tail_chars:] or (error_seen or "")[-tail_chars:], + stdout_tail="".join(stdout_buf)[-tail_chars:], ) - - error_seen 
= None - try: - for chunk in stream: - # Time check first - if time.time() - t0 > timeout_s: - error_seen = "[TIMEOUT]" - break - - # Typical keys: 'stream', 'status', 'error', 'errorDetail' - if chunk.get("stream"): - s = str(chunk["stream"]) - if s: - stdout_buf.append(s) - if "status" in chunk and chunk.get("progressDetail"): - # Status lines (pulling base layers, etc.)—treat as stdout - s = str(chunk.get("status", "")) - if s: - stdout_buf.append(s + "\n") - if "error" in chunk or "errorDetail" in chunk: - error_seen = (chunk.get("error") or str(chunk.get("errorDetail", ""))).strip() - if error_seen: - # also track in stderr tail - stderr_buf.append(error_seen + "\n") - break - except APIError: - logger.exception("Build stream APIError for '%s'", image_name) - error_seen = "APIError during build" - - duration = time.time() - t0 - - # Success path: ensure image exists - if not error_seen: - try: - img = client.images.get(image_name) - return BuildResult( - ok=True, - image_name=image_name, - image_id=img.id, - rc=0, - duration_s=duration, - stderr_tail="".join(stderr_buf)[-tail_chars:], - stdout_tail="".join(stdout_buf)[-tail_chars:], - ) - except ImageNotFound: - error_seen = "Build completed but image not found" - - # Failure - rc = 124 if error_seen == "[TIMEOUT]" else 1 - return BuildResult( - ok=False, - image_name=image_name, - image_id=None, - rc=rc, - duration_s=duration, - stderr_tail="".join(stderr_buf)[-tail_chars:] or (error_seen or "")[-tail_chars:], - stdout_tail="".join(stdout_buf)[-tail_chars:], - ) + finally: + if delete_img: + try: + img = client.images.get(image_name) + logger.debug("Deleting image '%s' after build.", image_name) + client.images.remove(image=img.id, force=True) + except ImageNotFound: + pass + except DockerException: + logger.exception("Failed to delete image '%s' after build.", image_name) def to_dict(self) -> dict[str, str]: """Return a JSON-serializable mapping of this context's contents.""" @@ -431,6 +444,11 @@ def 
__getitem__(self, key: str) -> DockerContext: def __setitem__(self, key: str, context: DockerContext) -> None: self.register(key, context) + def __contains__(self, key: str | Task) -> bool: + if isinstance(key, str): + key = self.parse_key(key) + return key in self.registry + def save_to_file(self, path: Path) -> None: dat = self.serialize(pretty=True) with self._lock: diff --git a/src/datasmith/docker/entrypoint.sh b/src/datasmith/docker/entrypoint.sh index e8a519d..555b9c9 100644 --- a/src/datasmith/docker/entrypoint.sh +++ b/src/datasmith/docker/entrypoint.sh @@ -1,7 +1,14 @@ #!/usr/bin/env bash # set -euo pipefail +set -x : "${ASV_ARGS:?Need to set ASV_ARGS}" -: "${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}" +: "${ASV_MACHINE:=?Need to set ASV_MACHINE}" +: "${ASV_OS:=?Need to set ASV_OS}" +: "${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}" +: "${ASV_ARCH:=?Need to set ASV_ARCH}" +: "${ASV_CPU:=?Need to set ASV_CPU}" +: "${ASV_RAM:=?Need to set ASV_RAM}" + cd_asv_json_dir() { local match @@ -18,9 +25,6 @@ cd_asv_json_dir() { eval "$(micromamba shell hook --shell=bash)" -pip install "cython<3" -bash maintainer/install_all.sh develop - micromamba activate base ROOT_PATH=${PWD} cd_asv_json_dir || exit 1 @@ -44,11 +48,12 @@ path.mkdir(parents=True, exist_ok=True) config = asv.config.Config.load('$CONF_NAME') config.results_dir = str(path / 'results') config.html_dir = str(path / 'html') +config.branches = ['HEAD'] asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) " - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS} + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME --machine "$ASV_MACHINE" --os "$ASV_OS" --num_cpu "$ASV_NUM_CPU" --arch "$ASV_ARCH" --cpu "$ASV_CPU" --ram "$ASV_RAM" micromamba run -n "asv_${version}" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME done diff 
--git a/src/datasmith/docker/orchestrator.py b/src/datasmith/docker/orchestrator.py index 42d27a3..44bd83b 100644 --- a/src/datasmith/docker/orchestrator.py +++ b/src/datasmith/docker/orchestrator.py @@ -12,7 +12,7 @@ from docker.errors import DockerException, ImageNotFound from docker.models.containers import Container -from datasmith.docker.context import ContextRegistry +from datasmith.docker.context import BuildResult, ContextRegistry, Task from datasmith.logging_config import get_logger logger = get_logger("docker.orchestrator") @@ -51,20 +51,23 @@ def build_repo_image(client: docker.DockerClient, image_name: str, repo_url: str def build_repo_sha_image( - client: docker.DockerClient, context_registry: ContextRegistry, owner: str, repo: str, sha: str, force: bool = False -) -> str: - image_name = f"asv/{owner}/{repo}/{sha}" + client: docker.DockerClient, context_registry: ContextRegistry, task: Task, force: bool = False +) -> BuildResult: + assert task.sha is not None, "Task.sha must be set" # noqa: S101 + image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() docker_ctx = context_registry[image_name] - docker_ctx.build_container( + build_res: BuildResult = docker_ctx.build_container_streaming( client=client, image_name=image_name, build_args={ - "REPO_URL": f"https://www.github.com/{owner}/{repo}", - "COMMIT_SHA": sha, + "REPO_URL": f"https://www.github.com/{task.owner}/{task.repo}", + "COMMIT_SHA": task.sha, }, force=force, + tail_chars=10_000, + pull=False, ) - return image_name + return build_res async def run_container( @@ -86,29 +89,25 @@ async def run_container( # Normalise to the cpuset string Docker expects cpuset = ",".join(map(str, cores)) if not isinstance(cores, str) else cores num_cores = len(cpuset.split(",")) - sha = image.split(":")[0].split("-")[-1] # Extract the commit SHA from the image name + + sha = image.split(":")[0].split("/")[-1] # Extract the commit SHA from the image name if "machine" not in machine_args: raise 
ValueError("machine_args must contain a 'machine' key") machine_args["machine"] = sha env = { "ASV_ARGS": f"{asv_args} --cpu-affinity {cpuset} --parallel {num_cores} --set-commit-hash={sha} --machine={sha}", - "ASV_MACHINE_ARGS": " ".join([f"--{k} '{v}'" for k, v in machine_args.items()]), + "ASV_MACHINE": machine_args.get("machine", ""), + "ASV_OS": machine_args.get("os", ""), + "ASV_NUM_CPU": machine_args.get("num_cpu", "1"), + "ASV_ARCH": machine_args.get("arch", ""), + "ASV_CPU": machine_args.get("cpu", ""), + "ASV_RAM": machine_args.get("ram", ""), } def _launch() -> tuple[int, dict[str, str]]: - container_name = f"{image.split(':')[0]}-{idx:03d}" + container_name = f"{image.split(':')[0].replace('/', '-')}-{idx:03d}" logger.debug("docker run name=%s cpuset=%s env=%s", container_name, cpuset, env) - # Log the exact command a human could copy-paste - logger.info( - "$ docker run --rm --name %s -e ASV_ARGS='%s' -e ASV_MACHINE_ARGS='%s' --cpuset-cpus %s %s", - container_name, - env["ASV_ARGS"], - env["ASV_MACHINE_ARGS"], - cpuset, - image, - ) - # Start the container on the specified CPUs container = client.containers.run( image, diff --git a/src/datasmith/docker/validation.py b/src/datasmith/docker/validation.py index 1267ff9..6c7f4a8 100644 --- a/src/datasmith/docker/validation.py +++ b/src/datasmith/docker/validation.py @@ -281,6 +281,8 @@ def validate_one( # noqa: C901 return _handle_run_exception(task, build_cmd, run_cmd, args, image_name, build_stage) finally: # best-effort cleanup - with contextlib.suppress(Exception): + try: if container: container.remove(force=True) + except Exception: + logger.exception("Failed to remove container for %s", image_name) diff --git a/uv.lock b/uv.lock index d8eb54c..a231adf 100644 --- a/uv.lock +++ b/uv.lock @@ -1175,7 +1175,7 @@ dependencies = [ { name = "diskcache", marker = "python_full_version < '3.10'" }, { name = "joblib", marker = "python_full_version < '3.10'" }, { name = "json-repair", version = "0.44.1", source = 
{ registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "litellm", marker = "python_full_version < '3.10'" }, + { name = "litellm", version = "1.63.11", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "magicattr", marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "openai", marker = "python_full_version < '3.10'" }, @@ -1184,7 +1184,7 @@ dependencies = [ { name = "pydantic", marker = "python_full_version < '3.10'" }, { name = "regex", marker = "python_full_version < '3.10'" }, { name = "requests", marker = "python_full_version < '3.10'" }, - { name = "rich", marker = "python_full_version < '3.10'" }, + { name = "rich", version = "14.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "tenacity", marker = "python_full_version < '3.10'" }, { name = "tqdm", marker = "python_full_version < '3.10'" }, { name = "ujson", marker = "python_full_version < '3.10'" }, @@ -1213,7 +1213,7 @@ dependencies = [ { name = "gepa", marker = "python_full_version >= '3.10'" }, { name = "joblib", marker = "python_full_version >= '3.10'" }, { name = "json-repair", version = "0.49.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "litellm", marker = "python_full_version >= '3.10'" }, + { name = "litellm", version = "1.75.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "magicattr", marker = "python_full_version >= '3.10'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ 
-1222,7 +1222,7 @@ dependencies = [ { name = "pydantic", marker = "python_full_version >= '3.10'" }, { name = "regex", marker = "python_full_version >= '3.10'" }, { name = "requests", marker = "python_full_version >= '3.10'" }, - { name = "rich", marker = "python_full_version >= '3.10'" }, + { name = "rich", version = "13.7.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "tenacity", marker = "python_full_version >= '3.10'" }, { name = "tqdm", marker = "python_full_version >= '3.10'" }, { name = "ujson", marker = "python_full_version >= '3.10'" }, @@ -2230,21 +2230,50 @@ wheels = [ [[package]] name = "litellm" -version = "1.75.8" +version = "1.63.11" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] dependencies = [ - { name = "aiohttp" }, + { name = "aiohttp", marker = "python_full_version < '3.10'" }, { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "httpx", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "jsonschema", marker = "python_full_version < '3.10'" }, + { name = "openai", marker = "python_full_version < '3.10'" }, + { name = "pydantic", marker = "python_full_version < '3.10'" }, + { name = "python-dotenv", marker = "python_full_version < '3.10'" }, + { name = "tiktoken", marker = "python_full_version < '3.10'" }, + { name = "tokenizers", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/47/7955210e2540fcde86a5848432823e7eb3a83b03aa06e97d63d1b07c3b30/litellm-1.63.11.tar.gz", hash = "sha256:89930895121d0cbf5553e560ed886c45be480ceec0eca3c53ae441473d5d46a4", size = 6630071, upload-time = "2025-03-15T05:48:19.003Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/70/73/82aa275f2dd751d98e81b3287bc00366e9ec2d6cf9e1a7eff6522d5d2775/litellm-1.63.11-py3-none-any.whl", hash = "sha256:f3915dc35309b164ef2419ad05e5241ddd97f3f47aa036df28365bf889d8ea23", size = 6948073, upload-time = "2025-03-15T05:48:16.45Z" }, +] + +[[package]] +name = "litellm" +version = "1.75.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "aiohttp", marker = "python_full_version >= '3.10'" }, { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "httpx" }, - { name = "importlib-metadata" }, - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "openai" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "tiktoken" }, - { name = "tokenizers" }, + { name = "httpx", marker = "python_full_version >= '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version >= '3.10'" }, + { name = "jinja2", marker = "python_full_version >= '3.10'" }, + { name = "jsonschema", marker = "python_full_version >= '3.10'" }, + { name = "openai", marker = "python_full_version >= '3.10'" }, + { name = "pydantic", marker = "python_full_version >= '3.10'" }, + { name = "python-dotenv", marker = "python_full_version >= '3.10'" }, + { name = "tiktoken", marker = "python_full_version >= '3.10'" }, + { name = "tokenizers", marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8d/4e/48e3d6de19afe713223e3bc7009a2003501420de2a5d823c569cefbd9731/litellm-1.75.8.tar.gz", hash = "sha256:92061bd263ff8c33c8fff70ba92cd046adb7ea041a605826a915d108742fe59e", size = 10140384, upload-time = "2025-08-16T21:42:24.23Z" } wheels = [ @@ -4044,14 +4073,34 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/bd/60/50fbb6ffb35f733654466f1a90d162bcbea358adc3b0871339254fbc37b2/requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14", size = 14782, upload-time = "2025-05-21T13:42:04.007Z" }, ] +[[package]] +name = "rich" +version = "13.7.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/01/c954e134dc440ab5f96952fe52b4fdc64225530320a910473c1fe270d9aa/rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432", size = 221248, upload-time = "2024-02-28T14:51:19.472Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", size = 240681, upload-time = "2024-02-28T14:51:14.353Z" }, +] + [[package]] name = "rich" version = "14.1.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] dependencies = [ { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "pygments" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } wheels = [ From e0d629e44e9b9f2efde7763c8740522f3d645fbb Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Thu, 28 Aug 2025 22:56:40 +0000 Subject: [PATCH 15/20] use dotenv --- src/datasmith/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/datasmith/__init__.py b/src/datasmith/__init__.py index f3ee4dd..707e7fd 100644 --- a/src/datasmith/__init__.py +++ b/src/datasmith/__init__.py @@ -1,5 +1,7 @@ import os +import dotenv + from datasmith.agents.config import configure_agent_backends from datasmith.logging_config import configure_logging @@ -8,11 +10,9 @@ def setup_environment() -> None: + # Load environment variables from .env file if it exists if os.path.exists("tokens.env"): - with open("tokens.env", encoding="utf-8") as f: - lines = f.readlines() - tokens = {line.split("=")[0].strip(): line.split("=")[1].strip() for line in lines if "=" in line} - os.environ.update(tokens) + dotenv.load_dotenv("tokens.env") else: logger.warning("No tokens.env file found. 
Skipping environment variable setup.") From 73b651b22728e399be7e8ebf15e2c3aba454d5d0 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Thu, 28 Aug 2025 23:00:57 +0000 Subject: [PATCH 16/20] remove unneccessary type hints --- pyproject.toml | 1 + src/datasmith/collation/collate_benchmark_results.py | 6 +++--- src/datasmith/detection/detect_breakpoints.py | 2 +- src/datasmith/scrape/scrape_dashboards.py | 2 +- uv.lock | 2 ++ 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a462925..b95e27b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ dependencies = [ "numpy", "pandas", "portkey-ai>=1.14.3", + "python-dotenv>=1.1.1", "requests", "ruptures", "simple-useragent", diff --git a/src/datasmith/collation/collate_benchmark_results.py b/src/datasmith/collation/collate_benchmark_results.py index 65ceeb0..98ab4d0 100644 --- a/src/datasmith/collation/collate_benchmark_results.py +++ b/src/datasmith/collation/collate_benchmark_results.py @@ -4,9 +4,9 @@ from typing import Optional import pandas as pd -from asv.commands.publish import Publish # type: ignore[import-untyped] -from asv.config import Config # type: ignore[import-untyped] -from asv.util import write_json # type: ignore[import-untyped] +from asv.commands.publish import Publish +from asv.config import Config +from asv.util import write_json from git import Repo from datasmith.logging_config import get_logger diff --git a/src/datasmith/detection/detect_breakpoints.py b/src/datasmith/detection/detect_breakpoints.py index f9ee149..43d4c01 100644 --- a/src/datasmith/detection/detect_breakpoints.py +++ b/src/datasmith/detection/detect_breakpoints.py @@ -2,7 +2,7 @@ import typing -import asv # type: ignore[import-untyped] +import asv import numpy as np import pandas as pd import ruptures as rpt # type: ignore[import-untyped] diff --git a/src/datasmith/scrape/scrape_dashboards.py b/src/datasmith/scrape/scrape_dashboards.py index efebd97..f83aa69 100644 --- 
a/src/datasmith/scrape/scrape_dashboards.py +++ b/src/datasmith/scrape/scrape_dashboards.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Callable -import asv # type: ignore[import-untyped] +import asv import pandas as pd from tqdm import tqdm diff --git a/uv.lock b/uv.lock index a231adf..8176a46 100644 --- a/uv.lock +++ b/uv.lock @@ -968,6 +968,7 @@ dependencies = [ { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pandas" }, { name = "portkey-ai" }, + { name = "python-dotenv" }, { name = "requests" }, { name = "ruptures" }, { name = "simple-useragent" }, @@ -1010,6 +1011,7 @@ requires-dist = [ { name = "numpy" }, { name = "pandas" }, { name = "portkey-ai", specifier = ">=1.14.3" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, { name = "requests" }, { name = "ruptures" }, { name = "simple-useragent" }, From 98a9c4efeba02d104d9b9fc6a1ff54ab5ba460d9 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Thu, 28 Aug 2025 23:09:44 +0000 Subject: [PATCH 17/20] remove support for 3.13 --- .github/workflows/main.yml | 2 +- pyproject.toml | 1 - tox.ini | 3 +-- uv.lock | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cd47c33..c2c0e61 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false defaults: run: diff --git a/pyproject.toml b/pyproject.toml index b95e27b..b1c4087 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", "Topic :: Software Development :: Libraries :: Python Modules", ] 
dependencies = [ diff --git a/tox.ini b/tox.ini index a75c913..e11102a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] skipsdist = true -envlist = py39, py310, py311, py312, py313 +envlist = py39, py310, py311, py312 [gh-actions] python = @@ -8,7 +8,6 @@ python = 3.10: py310 3.11: py311 3.12: py312 - 3.13: py313 [testenv] passenv = PYTHON_VERSION diff --git a/uv.lock b/uv.lock index 8176a46..c06bb3c 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.9, <4.0" resolution-markers = [ "python_full_version >= '3.12'", From b5f5c43820c7fe4ee2b49bffcd60d086891f500f Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sat, 30 Aug 2025 10:17:06 +0000 Subject: [PATCH 18/20] updating build agent to be reactive --- CONTRIBUTING.md | 126 -- scratch/context_registry_updated.json | 1950 +++++++++++++++++ .../scripts/initialize_context_registry.py | 2 +- scratch/scripts/synthesize_contexts.py | 1 + src/datasmith/agents/build_agent.py | 245 +++ src/datasmith/agents/container_toolbox.py | 332 +++ src/datasmith/agents/context_synthesis.py | 407 ++-- src/datasmith/agents/tool_executor.py | 118 + src/datasmith/docker/context.py | 110 +- src/datasmith/docker/probe_build.sh | 46 + uv.lock | 59 +- 11 files changed, 3085 insertions(+), 311 deletions(-) delete mode 100644 CONTRIBUTING.md create mode 100644 scratch/context_registry_updated.json create mode 100644 src/datasmith/agents/build_agent.py create mode 100644 src/datasmith/agents/container_toolbox.py create mode 100644 src/datasmith/agents/tool_executor.py create mode 100644 src/datasmith/docker/probe_build.sh diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 2e5c8b1..0000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,126 +0,0 @@ -# Contributing to `datasmith` - -Contributions are welcome, and they are greatly appreciated! -Every little bit helps, and credit will always be given. 
- -You can contribute in many ways: - -# Types of Contributions - -## Report Bugs - -Report bugs at https://github.com/formula-code/datasmith/issues - -If you are reporting a bug, please include: - -- Your operating system name and version. -- Any details about your local setup that might be helpful in troubleshooting. -- Detailed steps to reproduce the bug. - -## Fix Bugs - -Look through the GitHub issues for bugs. -Anything tagged with "bug" and "help wanted" is open to whoever wants to implement a fix for it. - -## Implement Features - -Look through the GitHub issues for features. -Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. - -## Write Documentation - -datasmith could always use more documentation, whether as part of the official docs, in docstrings, or even on the web in blog posts, articles, and such. - -## Submit Feedback - -The best way to send feedback is to file an issue at https://github.com/formula-code/datasmith/issues. - -If you are proposing a new feature: - -- Explain in detail how it would work. -- Keep the scope as narrow as possible, to make it easier to implement. -- Remember that this is a volunteer-driven project, and that contributions - are welcome :) - -# Get Started! - -Ready to contribute? Here's how to set up `datasmith` for local development. -Please note this documentation assumes you already have `uv` and `Git` installed and ready to go. - -1. Fork the `datasmith` repo on GitHub. - -2. Clone your fork locally: - -```bash -cd -git clone git@github.com:YOUR_NAME/datasmith.git -``` - -3. Now we need to install the environment. Navigate into the directory - -```bash -cd datasmith -``` - -Then, install and activate the environment with: - -```bash -uv sync -``` - -4. Install pre-commit to run linters/formatters at commit time: - -```bash -uv run pre-commit install -``` - -5. 
Create a branch for local development: - -```bash -git checkout -b name-of-your-bugfix-or-feature -``` - -Now you can make your changes locally. - -6. Don't forget to add test cases for your added functionality to the `tests` directory. - -7. When you're done making changes, check that your changes pass the formatting tests. - -```bash -make check -``` - -Now, validate that all unit tests are passing: - -```bash -make test -``` - -9. Before raising a pull request you should also run tox. - This will run the tests across different versions of Python: - -```bash -tox -``` - -This requires you to have multiple versions of python installed. -This step is also triggered in the CI/CD pipeline, so you could also choose to skip this step locally. - -10. Commit your changes and push your branch to GitHub: - -```bash -git add . -git commit -m "Your detailed description of your changes." -git push origin name-of-your-bugfix-or-feature -``` - -11. Submit a pull request through the GitHub website. - -# Pull Request Guidelines - -Before you submit a pull request, check that it meets these guidelines: - -1. The pull request should include tests. - -2. If the pull request adds functionality, the docs should be updated. - Put your new functionality into a function with a docstring, and add the feature to the list in `README.md`. diff --git a/scratch/context_registry_updated.json b/scratch/context_registry_updated.json new file mode 100644 index 0000000..e7845fb --- /dev/null +++ b/scratch/context_registry_updated.json @@ -0,0 +1,1950 @@ +{ + "contexts": { + "Task(owner='apache', repo='arrow', sha='3d6d5817313920abc71c854828d95b63b2562938', commit_date=1726645863.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='apache', repo='arrow', sha='77f099fb5c324afc8ee38cda4976bf20a08e7a4a', commit_date=1668536482.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to wheel if it fails\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation 
${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='arviz-devs', repo='arviz', sha='904129035bb29d1316833cf6f5f1b5ccf69973e3', commit_date=1577571349.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='arviz-devs', repo='arviz', sha='d58fd616bdbf2f269ca66d293428f14b97064946', commit_date=1569629064.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='asdf-format', repo='asdf', sha='8d342d36794f92db7b14a7a6f1415ff5d65fed9e', commit_date=1701819981.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && 
\\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='asdf-format', repo='asdf', sha='8e7fe6cab33649cb55fd5cdcac6cca77d9e9453c', commit_date=1698664980.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='astropy', repo='astropy', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\nROOT_PATH=${PWD}\ngit clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install -e . scipy matplotlib\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bjodah', repo='chempy', sha='10bdaa5a1d128959ec10128246d977fd137c9671', commit_date=1444135786.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" python setup.py sdist bdist_wheel\n micromamba run -n \"asv_${version}\" 
pip install dist/*.whl\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bjodah', repo='chempy', sha='f61bd0bc1083a4fa90c736d74d591c9eef51f80c', commit_date=1535629364.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 
\\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bloomberg', repo='memray', sha='51aa84e51179d80758b3bbd7dce097b2b2e4fd19', commit_date=1701719904.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pkgconfig\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge libunwind\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install 
-y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='bloomberg', repo='memray', sha='926624f40e4f71bb71c8e22106d7979cb06bb29a', commit_date=1673995384.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='calebbell', repo='thermo', sha='436a9ccd0c73c55df4d4a8f7383493f540a6b13f', commit_date=1641864678.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='calebbell', repo='thermo', sha='71259b242aadd45a5e1d2249e29019a2e856ac04', commit_date=1643426520.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='danielgtaylor', repo='python-betterproto', sha='c82816b8be4d6f240cde4e5f28234e5ee3b26920', commit_date=1697423550.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install 
git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable fails due to BackendUnavailable error\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='danielgtaylor', repo='python-betterproto', 
sha='ca6b9fe1a2ccf7e8a9b02085a56de905e89eea69', commit_date=1697455035.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable install fails due to BackendUnavailable error\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='datalad', repo='datalad', sha='83447c2944e4ed89e0a82ff2a3ea9b74221e8990', commit_date=1606433958.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='datalad', repo='datalad', sha='a9f423a8da0d144c88a74893449b6cb88cee3588', commit_date=1637870957.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dedupeio', repo='dedupe', sha='7d2c79becabe375980613ff3bf66da678cbad658', commit_date=1719492316.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dedupeio', repo='dedupe', sha='9d527acc20f565f6859e9ee6f4a4903c0629a29f', commit_date=1673926972.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='deepchecks', repo='deepchecks', sha='9a5dd7dc90640d987d6ecf03b8bd9a1ea86199cb', commit_date=1658146693.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Fix the invalid version issue by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='deepchecks', repo='deepchecks', sha='e836e79da9cc0ac9e99ae3d4bfdd2982cd299080', commit_date=1661253434.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the invalid version error by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='default', repo='default', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone\n", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='devitocodes', repo='devito', sha='ccfb8230f2e5030e4a7b3548334e2d03757841f6', commit_date=1708609467.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='devitocodes', repo='devito', sha='e37d6ffc9edf5b0acc2e0b68c1853052c2959fda', commit_date=1719409850.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dipy', repo='dipy', sha='26ad85ff190ad0145f73fc87354cb12f2792a475', commit_date=1712766187.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dipy', repo='dipy', sha='984a2bbff98c7090a222fde52c3b7f6b0b3a189e', commit_date=1751068916.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='django-components', repo='django-components', sha='2472c2ad338a23fba015d4d9816cb62d1325455f', commit_date=1742720064.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='django-components', repo='django-components', sha='e0b718c31495a400d6e8712ed931ce4ab253e673', commit_date=1745142786.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dottxt-ai', repo='outlines', sha='1e8022e210dc7eb193d8e5808a617b1a9dc15644', commit_date=1752229063.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='dottxt-ai', repo='outlines', sha='e9485cf2126d9c14bd749d55f8aa6729d96808d0', commit_date=1732739305.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='geopandas', repo='geopandas', sha='7d50380229eb84375546c2dc586de659096a6e61', commit_date=1531683944.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='geopandas', repo='geopandas', sha='c07ae3c50b6aa20e745b3693321c469e0d828a1c', commit_date=1611525697.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && 
\\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='h5py', repo='h5py', sha='1487a54fb5149603dcc32604df4db418ea4f5236', commit_date=1663429492.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='h5py', repo='h5py', sha='a8e82bcd63de14daddbc84c250a36c0ee8c850f6', commit_date=1602327474.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='holoviz', repo='datashader', sha='00220d8d24a4ada0ac8d30b6875004af5b03fdc4', commit_date=1738081225.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='holoviz', repo='datashader', sha='d9403a963e10e57cbf6c00c64c2998e9931097c0', commit_date=1736788153.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install hatchling\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='inducer', repo='loopy', sha='628b37187bec02ecd863662a96d024fbea5e89bf', commit_date=1623653651.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='inducer', repo='loopy', sha='b5da71bb9abf90848e0f196eedbd564d4fc477d2', commit_date=1623736465.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='ipython', repo='ipyparallel', sha='127b48f8bfeb3576c27e734a5414599fbbd4037e', commit_date=1679989417.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='ipython', repo='ipyparallel', sha='1cda27e603bf6e14866d085822afbf19b04d7574', commit_date=1681399422.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='kedro-org', repo='kedro', sha='507ebe4fbb660cd38e7ba5f9fbf89d35bfce29a4', commit_date=1746617473.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='kedro-org', repo='kedro', sha='b3a29d18f8ba2572a371f92b6f862148b77ffec6', commit_date=1744035416.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='lmfit', repo='lmfit-py', sha='9f9af6f36c0928767ea8b004ea8cb5a16aba6b04', commit_date=1634240070.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='lmfit', repo='lmfit-py', sha='f3dfdd8607aca6aceae29fb3fd57e03fd308a472', commit_date=1547606940.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='makepath', repo='xarray-spatial', sha='4df552cb70ae2f6f07b4325bcbf6a1b2afdb6718', commit_date=1643710398.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pyct\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN 
/workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='makepath', repo='xarray-spatial', sha='59984d859820e6e1cd9f11f1bf7696c04d1924fb', commit_date=1646634548.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install param pyct # Ensure 'param' and 'pyct' are installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arctic', sha='91c2d269d7ad48db23799b3d21cb191880286806', commit_date=1519908330.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arctic', sha='d33d24bb8d6d6625351b316ce55b74ef8c957744', commit_date=1521040101.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arcticdb', sha='97493e6cf3b46f52204ce5ef436f1e828f6b0bb3', commit_date=1728297449.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='man-group', repo='arcticdb', sha='dd4617e309c5b31cebe79816ea43bf1136b59365', commit_date=1722514119.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation .\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mars-project', repo='mars', sha='a4645734e87bd01320ecf28191f6954dd034cbf4', commit_date=1654482585.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mars-project', repo='mars', sha='acecc9c6bdb7fbd45003e4a37424c42a4cec8ac2', commit_date=1652428417.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0030b3864eb77a90a9442904e7d64d1619c6add5', commit_date=1607478583.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build 
&& \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0880dc18c211a6508240a43ff6fe618c9be7f568', commit_date=1617487191.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0b6b372fdfcdef15aacbe1c2b82d728f4f1c0401', commit_date=1607478582.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='0f9a6e558a5798880c7b5604346a8a15826d0187', commit_date=1607980018.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package-requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r ${ROOT_PATH}/package-requirements.txt\n fi\n\n # Install optional dependencies that may be needed for tests\n micromamba run -n \"asv_${version}\" pip install pytest-xdist pytest-cov sphinx sphinx-sitemap sphinx-rtd-theme\n\n # Build and install the package\n cd ${ROOT_PATH}\n # First try pip install with --no-deps to avoid dependency conflicts\n if ! 
micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .; then\n # If that fails, try building wheel and installing\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install --no-deps dist/*.whl\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='108ffe0b19080b39975a93f947162f7371ac9144', commit_date=1539114837.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install project in development mode\n cd ${ROOT_PATH}\n if [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n 
\"asv_${version}\" pip install -e .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='13a5df0fcbf13852da5613cefd84708e1fd506c6', commit_date=1618051027.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd 
${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='18372f1526d8b0df776232504afe508ae8944b4c', commit_date=1696946134.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions below 3.9 as per error message\n if [[ \"$version\" < \"3.9\" ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n \n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" 
\"numpy<2.0.0\" build\n \n # Install additional dependencies from requirements files\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r package/requirements.txt\n fi\n \n # Build and install MDAnalysis from package directory\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e package/\n # Install dependencies after the package installation\n micromamba run -n \"asv_${version}\" pip install -e package/[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', 
sha='1cd2b3b4f4d70c24c8de234d35ba1a7f900212c0', commit_date=1629565332.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \"cython>=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.71\" \"networkx>=1.0\" \"griddataformats>=0.4.0\" 
\"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='1cfe404e5d2c2a807162d4e3d440b6969e14d87b', commit_date=1732305078.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build and test dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.36\" \"numpy>=1.21.0\" pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython tidynamics\n \n # Ensure build system requirements are met\n micromamba run -n 
\"asv_${version}\" pip install build setuptools wheel\n \n # Look for package in subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Build and install MDAnalysis in development mode\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .[test,analysis]\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .[test,analysis]\n else\n echo \"Neither pyproject.toml nor setup.py found. Cannot install package.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='1eca65591fa402584dd29b6d1a02111af30e68eb', commit_date=1691706686.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install 
additional required packages\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n # Install requirements one by one to better handle failures\n while IFS= read -r requirement || [[ -n \"$requirement\" ]]; do\n # Skip empty lines and comments\n [[ -z \"$requirement\" || \"$requirement\" =~ ^#.*$ ]] && continue\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < package/requirements.txt\n fi\n \n # Build and install MDAnalysis\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: 
\"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu 
\"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='264d6f9357a978444baa1f99411a03453664ab2b', commit_date=1672867721.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" 
pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to 
set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', 
sha='26880f0f0a4bb831fca9668650400858c34f442b', commit_date=1602889606.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='2ee4e9da5aa3a2c1b21fc3d1897bd70e0ab2064d', commit_date=1602770152.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis with optimized flags\n export 
CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='35d9d2e3ab08e7e6741b57fe02a7215fe3b91a6c', commit_date=1742597504.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and tools\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install networkx matplotlib seaborn netCDF4 mmtf-python gsd biopython parmed griddataformats joblib 
threadpoolctl scikit-learn hypothesis codecov\n \n # Install MDAnalysis in editable mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='392c8ae5391e20f5e496f7ac03dae08c44deca3b', commit_date=1646727863.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Check for pyproject.toml or setup.py and install accordingly\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='39b0e4cc184725cd0e5e710780c8154ed4de9f4f', commit_date=1524419705.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools>=45.0\" wheel \"cython>=0.29\" numpy\n \n # Install MDAnalysis in editable mode with specific dependencies for this older version\n micromamba run -n \"asv_${version}\" pip install --no-deps --verbose 
--editable \"${ROOT_PATH}\"\n \n # Install runtime dependencies appropriate for the 2018 version\n micromamba run -n \"asv_${version}\" pip install six mmtf-python mock biopython networkx gsd scipy matplotlib\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='404040598f78db05882fa5b2bba1d35fc6a30510', commit_date=1605754667.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='42c541771ab7aee318783d296caa3e10b33f53eb', commit_date=1613225552.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13,<3.0\" \"numpy>=1.16.0\" \"setuptools>=40.8.0\" wheel\n \n # Additional dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd matplotlib netcdf4 
networkx\n \n # Build and install MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='4365f3b07c1bf2ebcf16424b26162102954c5b90', commit_date=1591777205.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='45e56e8314c278e3eb98ed7a6029b74e7435e8be', commit_date=1598362533.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd 
${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='474be5bbe32270bb9ddf02dc3cab74d3c1312c5e', commit_date=1728274662.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n \n # Check if we're in the right directory and if setup.py exists\n cd ${ROOT_PATH}\n if [ ! 
-f \"setup.py\" ] && [ ! -f \"pyproject.toml\" ]; then\n # Try to find the package directory\n if [ -d \"package\" ]; then\n cd package\n elif [ -d \"mdanalysis\" ]; then\n cd mdanalysis\n fi\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='481e36a3aa8767c4b895eabfd7ef8b89132ab611', commit_date=1723835551.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx tidynamics biopython\n \n # Check if we're in the right directory and if setup.py exists\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd \"${ROOT_PATH}/package\" || exit 1\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='4fafd51de84d5b89be0559a412acefde0040847c', commit_date=1726273184.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip 
install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='52b3d261240efed0546d9f15ee42c7f445e72c13', commit_date=1693261706.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with version constraints for Python 3.8\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython\n \n # Install additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install pytest pytest-xdist sphinx 
sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock biopython networkx gsd\n \n # Install package in development mode\n cd \"${ROOT_PATH}\"\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found. Attempting direct install.\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='559528f3349bebcaeb82e7f97fd6b76ae8aecce2', commit_date=1501861121.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install numpy and other build dependencies separately to handle version constraints\n if [[ \"$version\" == \"2.7\" ]]; then\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<1.17\" \"scipy<1.3\" \"cython<3.0\" pytest setuptools\n else\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<2.0\" 
\"scipy<2.0\" \"cython<3.0\" pytest setuptools\n fi\n \n # Install compilers and build tools\n micromamba install -y -n \"asv_${version}\" -c conda-forge compilers wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='mdanalysis', repo='mdanalysis', sha='5948963e0e9d92c9ddd0829ba3df3d9d496bbf01', commit_date=1672872621.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install --upgrade \"pip<24.0\" setuptools wheel\n micromamba run -n \"asv_${version}\" pip 
install \"cython<3.0\" \"numpy<2.0\"\n \n # Install package requirements from package directory\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install with relaxed constraints and ignore errors\n sed 's/>=/~=/g' ${ROOT_PATH}/package/requirements.txt | grep -v \"numpy\" | micromamba run -n \"asv_${version}\" pip install -r /dev/stdin || true\n fi\n \n # Install additional dependencies needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"biopython~=1.80\" fasteners griddataformats\n \n # Install the package in development mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n else\n echo \"No pyproject.toml or setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN 
/workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='59f4e395178240d5e3f36088d7a4d98ddd0e3607', commit_date=1680135568.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx-rtd-theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest 
\\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5bf1979b36cd4d5f55d691e6927aa606fbeb8791', commit_date=1703619619.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install \"numpy>=1.21.0\" \"cython>=0.29.32\" \"mmtf-python>=1.0.0\" gsd biopython scipy pytest\n\n # Look for package directory containing setup.py\n cd \"${ROOT_PATH}\"\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n fi\n\n # Set environment variables to help with compilation\n export CFLAGS=\"-std=c99 -O3 -funroll-loops -fsigned-zeros\"\n export NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION\n\n # Build and install MDAnalysis with specific build settings\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -v -e .\n 
else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5c19974c43125c94f98ab45d2f9965c70e427eec', commit_date=1541518721.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-deps \"cython>=0.16\" numpy>=1.10.0 biopython>=1.71 networkx>=1.0 griddataformats>=0.4.0 six>=1.4.0 fasteners mmtf-python>=1.0.0 tqdm>=4.43.0 packaging>=20.0 pytest>=3.3.0 mock\n \n # Build and install MDAnalysis 
in development mode with specific numpy version constraint\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"numpy>=1.16.5,<2.0\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='5cf8c5599e1a27c53e774c436b4e03fe71080f7a', commit_date=1534279531.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional dependencies required by MDAnalysis\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python tqdm mock joblib\n\n # Build and install MDAnalysis with appropriate flags\n if [[ \"$version\" == \"2.7\" ]]; then\n # For Python 
2.7, use a more conservative installation approach\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable ${ROOT_PATH}\n else\n # For Python 3.x, use build isolation disabled for better compatibility\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='61e236d45c52030d74ba6277c0a59e8a43a13ea9', commit_date=1593710203.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with pinned versions appropriate for 2020\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29,<0.30\" \"numpy>=1.13.0,<1.19\" \"biopython>=1.71,<1.78\" \\\n \"networkx>=1.0,<2.5\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" 
\"scipy>=1.0.0,<1.6\" \\\n \"joblib<1.0\" \"mock\" \"psutil<5.8\" \"pytest<6.0\" \"pytest-cov\" \"pytest-xdist<2.0\" \"hypothesis<6.0\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='62c35d49bd9458f2b5057d28d4904391a4a38513', commit_date=1534780584.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
\"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='6bc52ec2f0744cdf3c63a2e43aff232381ec4dd1', commit_date=1669766518.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n\n # Ensure we're in the package directory\n cd ${ROOT_PATH}/package || cd ${ROOT_PATH}\n\n # Try to build and install MDAnalysis\n if [ -f \"setup.py\" ]; then\n micromamba run -n 
\"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither setup.py nor pyproject.toml found in current directory\"\n exit 1\n fi\n\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='6d5ef34292899958ea2a0148388ecc47cf499da1', commit_date=1620729923.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='73cd1e69be88f1b47b1327c1918c0ad326bec302', commit_date=1603501474.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install build dependencies \n micromamba install -y -n \"asv_${version}\" -c conda-forge numpy scipy cython pytest compilers setuptools pip wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps cython numpy pytest sphinx sphinx-sitemap 
sphinx-rtd-theme\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='740cae26820eba538f9990ec904adc9f39a65b2e', commit_date=1619881090.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd 
${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='740e74e8c61ea01a4b2120bd369b11a58cb9c304', commit_date=1728331627.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create base environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \\\n cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme \\\n mmtf-python mock gsd griddataformats tidynamics \\\n setuptools wheel build\n\n # Look for package subdirectories\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n\n # Try to build and install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found in current directory\"\n exit 1\n fi\n cd ${ROOT_PATH}/benchmarks\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='79dead30cc19cd821617a6746663a68709b276e0', commit_date=1754497815.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python ninja cython packaging\n # Build and install MDAnalysis with meson\n cd ${ROOT_PATH}\n # Ensure we're in the package directory with pyproject.toml\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n fi\n micromamba run -n \"asv_${version}\" 
python -m pip install --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='7c468a46344d17f91d44059332fcc533dad01cde', commit_date=1567026117.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython networkx matplotlib gsd griddataformats tidynamics\n \n # Install package in development mode with explicit build step\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='81b8ef51e5bc1aa2824294ac6c52818c74975658', commit_date=1741727282.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python mock gsd griddataformats scipy matplotlib biopython 
networkx tidynamics\n\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"No setup files found in expected locations. Please check repository structure.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: 
\"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='84ee67b99fc3bf165d2f58057fac3315d8bb33af', commit_date=1727431157.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run 
-n \"asv_${version}\" pip install packaging\n \n # Try to find and build from package directory\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n fi\n else\n echo \"Package directory not found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', 
sha='8599e47b77a89486a1ffe97a3f146751611d9595', commit_date=1680132537.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation 
--editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='8c3577f5a72bee654d94367e4bef51791ffa5d0b', commit_date=1591177328.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six gsd mmtf-python networkx matplotlib biopython griddataformats GridDataFormats scipy tqdm joblib mock\n\n # 
Install package in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='93c4a97761469a2fd013c280d04435ae178f2c44', commit_date=1693273052.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions < 3.9 as MDAnalysis requires Python 3.9+\n if [[ $(echo \"$version\" | cut -d. -f2) -lt 9 ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip 
install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm scipy biopython\n\n # Ensure we're in the root directory\n cd ${ROOT_PATH}\n \n # Look for package subdirectory\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Try to build and install the package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\n \n # Install remaining dependencies\n micromamba run -n \"asv_${version}\" pip install -e .[test,doc]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT 
[\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n 
\"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='95fedb590d1afd268c0a643302cd703b8756f5d3', commit_date=1685194826.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Check Python version requirement (MDAnalysis needs >=3.9)\n if (( $(echo \"$version\" | cut -d. 
-f1,2 | sed 's/\\.//' | bc) < 39 )); then\n echo \"Skipping Python $version as MDAnalysis requires Python >=3.9\"\n continue\n fi\n\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \\\n numpy scipy cython pytest compilers \\\n gsd networkx matplotlib tqdm pandas\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n\n # Install build dependencies and package\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e \".[test,analysis]\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo 
pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os 
\"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='9a2cd43fccd4426f91b195ea9902e5b78a6c2e3b', commit_date=1710090427.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config 
$CONF_NAME\n \n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel matplotlib pandas\n micromamba run -n \"asv_${version}\" pip install GridDataFormats mmtf-python networkx fasteners mda-xdrlib waterdynamics pathsimanalysis mdahole2\n \n # Install the package in editable mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to 
set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n 
\"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='9ba1ab964920acfc986d8e264f78c965e062e9d0', commit_date=1511010257.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n # Using older versions since this is a 2017 commit\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<=1.13\" \"scipy<=1.0\" \"cython<=0.27\" setuptools wheel pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv 
machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"sphinx<1.7\" \"sphinx_rtd_theme<0.3\"\n \n cd ${ROOT_PATH}\n # Build and install MDAnalysis with optimizations disabled to avoid timeouts\n CFLAGS=\"-O0\" CXXFLAGS=\"-O0\" micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a1bca526f473325f91c12fb15c887243a2a9244b', 
commit_date=1646736472.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode with older Cython version\n cd ${ROOT_PATH}\n # First install the core package\n micromamba run -n 
\"asv_${version}\" pip install --no-build-isolation --editable package/\n # Then install the test suite\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable testsuite/\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a3672f216aa162f2549d1712fad0118b2cc98d49', commit_date=1734398599.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install gsd 
mmtf-python networkx scipy tqdm packaging matplotlib biopython griddataformats\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='a6034750dc47c8904a297efa184292c73c0690bb', commit_date=1692115614.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy \"packaging<22\" pytest\n \n # Install package in development 
mode\n if [ -f \"package/setup.py\" ]; then\n cd package\n fi\n \n # Try to build and install\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n \n # Return to root directory\n cd ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a64eed98b38307e4699b59eef9f265cbead37ad6', commit_date=1607980019.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with explicit numpy 
dependency\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable .\n micromamba run -n \"asv_${version}\" pip install numpy scipy\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='a6edec02af44fbb4589ef1da25a54a4cc8895ee4', commit_date=1671201733.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd netcdf4 bzip2 mmtf-python\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install pytest-xdist 
sphinx sphinx_rtd_theme\n\n # Ensure we're in the root directory before building\n cd \"${ROOT_PATH}\"\n\n # Build and install MDAnalysis with optimizations\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n \n # First try pyproject.toml-based install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n # Fallback to setup.py if exists\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found. Checking package subdirectories...\"\n # Check for package subdirectories\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n cd \"${ROOT_PATH}\"\n elif [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" python setup.py develop\n cd \"${ROOT_PATH}\"\n else\n echo \"No installation method found. Build failed.\"\n exit 1\n fi\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY 
docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='aaa4456db50e237cf580c8c986c00d7c5fbe3075', commit_date=1703622753.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n\n # Navigate to package directory if needed\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd ${ROOT_PATH}/package || exit 1\n fi\n \n # Build and install MDAnalysis with specific compiler flags and additional dependencies\n export CFLAGS=\"-DXDR_GETPOS_RETURNS_UINT32=1 -DXDR_SETPOS_RETURNS_INT=1\"\n # Try installing with conda-forge compilers first\n micromamba install -y -n \"asv_${version}\" -c conda-forge gcc gxx\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b3208b39aab61be53f8b610f1fef628f83262205', 
commit_date=1725909222.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n # Try to find and use setup.py in package subdirectories\n cd ${ROOT_PATH}\n if [ -d 
\"package\" ]; then\n cd package\n fi\n # Install MDAnalysis in development mode with verbose output\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b5ba8278b3e09b80109aa06f77832be00f8752f0', commit_date=1510724778.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # For older versions of MDAnalysis, build_ext is needed before 
install\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b672b595b57f6862d486391d646cf30c31fd8501', commit_date=1598490143.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy\n \n # Build and install MDAnalysis with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python 
setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='b7f36bd148f1eed47f2dc935b89d28c8cae468c4', commit_date=1541446943.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install build dependencies first\n if [ -f \"${ROOT_PATH}/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n fi\n \n # 
Build and install MDAnalysis with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='bc95e31af1bd1a583161318ab381d005452d48ea', commit_date=1611524871.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='bdb1352f4743aa2101ba2d6b3c9c4fbeb5ae8584', commit_date=1680212962.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx 
tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='be4b6ee8fa243a0d9e18b936a3d018f2b7418914', commit_date=1650356257.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid Cython errors\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython==0.29.36\" \"numpy<2.0.0\"\n\n # Install required dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n 
\"mmtf-python>=1.0.0\" \\\n \"networkx>=2.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.5.0\" \\\n \"biopython>=1.80\" \\\n \"griddataformats>=0.4.0\" \\\n \"packaging\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\"\n\n # Build and install MDAnalysis\n cd \"${ROOT_PATH}\"\n if [ -f \"package/setup.py\" ]; then\n cd package\n # Use --no-build-isolation to ensure our carefully installed dependencies are used\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: 
\"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba 
run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c07b5c8897688d778e57e1ef34be86f58c969fe7', commit_date=1607478583.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba 
run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='c3289d8994936ce7dbe7842e8877d597ca96360a', commit_date=1752273263.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install 
--no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c565b9d3a11508604a1217e37199ac17a8c618f2', commit_date=1654106359.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython matplotlib tqdm pandas tidynamics\n\n # 
Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n # Use build system if pyproject.toml exists\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n # Use setup.py if available\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py or pyproject.toml found in package root directory.\"\n # Try looking in package subdirectory\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Could not find build configuration. Cannot build package.\"\n exit 1\n fi\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to 
set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c56e8df543e1aba21959a7c7b3029eacd57d9130', commit_date=1661799771.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<2.0.0\" scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n\n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n 
\"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3,<4.0.0\" \\\n \"mock>=3.0.5\" \\\n \"packaging>=20.4\" \\\n \"pytest-xdist>=1.31.0\" \\\n \"pytest-cov>=2.10.1\" \\\n \"pytest-timeout>=1.4.2\" \\\n \"hypothesis>=5.19.0\" \\\n \"psutil>=4.3.1\" \\\n \"biopython>=1.80\" \\\n \"duecredit>=0.9.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"tqdm>=4.43.0\" \\\n \"joblib>=0.12\" \\\n \"fasteners>=0.15\" \\\n \"networkx>=2.0\" \\\n \"threadpoolctl>=2.0.0\"\n\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Build and install MDAnalysis in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set 
ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" 
asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c5cbc2551c1175e8d13887783c7ab2894607ac92', commit_date=1671293813.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n 
\"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies that might be needed for compilation\n micromamba install -y -n \"asv_${version}\" -c conda-forge gsd netcdf4 bzip2 gcc gxx\n \n # Install MDAnalysis with verbose output and no build isolation\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', 
sha='c620b141f018628356bb9cdd16eefa640b6080ba', commit_date=1671200774.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n \"numpy<2.0\" \\\n \"cython<3.0\" 
\\\n setuptools \\\n wheel \\\n pip \\\n build\n\n # Try building and installing from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c6f1a5a5663913f00cc5f727ad0e662bbf23f18f', commit_date=1617010037.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\" setuptools wheel\n \n # Build 
and install MDAnalysis in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c70504d99e8b6ff7f61778cff1f5956da708ddad', commit_date=1619628547.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
\"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='c815614b5ae8ed86eaa0d68e10451fde7e72242b', commit_date=1671293292.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with compatible versions\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install requirements one by one to handle 
dependencies better\n while IFS= read -r requirement; do\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < \"${ROOT_PATH}/package/requirements.txt\"\n fi\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', 
repo='mdanalysis', sha='cb05695ca422c216406a0eae4040c782a2a03812', commit_date=1629822068.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel cython numpy scipy\n \n # Install optional dependencies that might be needed for MDAnalysis\n micromamba run -n 
\"asv_${version}\" pip install --no-deps matplotlib networkx gsd biopython\n \n # Install the package in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='d2e22ffb0cb46af5266e39b940d7f00c1ca293c1', commit_date=1534167809.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python 
setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='d73b653f19e8446bbb9de51bb41d71f78d148d30', commit_date=1534803427.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis with test dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='da77f2bead8c4a634d2ba5b61cd7d7f841c01c0b', commit_date=1671205345.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist\n # Build and install MDAnalysis using setup.py since pyproject.toml is not found\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='daee516f23ead8e42c2e42b7636f9ec243ab306e', commit_date=1603119467.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='dcfa60a2ee0bcee7f54e969666950941905d825a', commit_date=1621773545.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e213f2be8e8741efc7cdddd35dc4bd2d88e0ff85', commit_date=1745000938.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n \n # Install package-specific dependencies\n cd ${ROOT_PATH}/package\n if [ -f \"requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\n fi\n \n # Build and install package\n cd ${ROOT_PATH}\n 
if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in package directory\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e3966303776577e15a043daeceff5a591370398a', commit_date=1534255980.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install additional build dependencies\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python mock joblib\n \n # Install in editable mode with 
test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='e8fbd529fc55cb187d38bdef141d74757f22bdc5', commit_date=1594518308.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='eab18cb8418ddb1dd72b44f474833de4a2999884', commit_date=1654100638.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n mmtf-python \\\n networkx \\\n scipy \\\n matplotlib \\\n tqdm \\\n \"cython<3.0\" \\\n \"numpy<2.0\" \\\n pip 
\\\n setuptools \\\n wheel \\\n build\n\n # Try building and installing with specific C compiler flags\n cd ${ROOT_PATH}\n export CFLAGS=\"-fcommon\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v .\n\n # If that fails, try alternative installation method\n if [ $? -ne 0 ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='eae5845cf5488ae1db1cdcc2075f68406291721e', 
commit_date=1517964764.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six mmtf-python mock biopython networkx gsd joblib setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n 
\"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='ee4759293e1a4a5109c6b66e133acb1af7d24b0d', commit_date=1567703043.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n 
\"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='f5e9603f35b1e1587c1a1583793374fbfa0f80c5', commit_date=1629232880.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"cython>=0.29.13\" \\\n \"numpy>=1.16.0\" \\\n \"biopython>=1.74\" \\\n \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.0.0\" \\\n \"joblib\" \\\n 
\"mock\" \\\n \"packaging\" \\\n \"pytest\" \\\n \"pytest-xdist\" \\\n \"pytest-cov\" \\\n \"pytest-timeout\" \\\n \"psutil\" \\\n \"hypothesis\" \\\n \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\" \\\n \"duecredit\"\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='f7a6f47e9c8c4637770c2c0cc0c20da841d11622', commit_date=1516881817.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools pip wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics six\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fb9e0bc786b21c15cefe0027fc83a441e1b19950', commit_date=1685186356.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging 
mmtf-python gsd networkx matplotlib tqdm pandas biopython griddataformats scipy\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n \n # Build and install MDAnalysis\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fe22dc3794f1f5d466f9128e4c7050fa0d58e62f', 
commit_date=1619962288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install cython numpy setuptools 
wheel\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='fed8be34a3434a621bacd438d2f9307139a24511', commit_date=1511384425.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Skip Python 2.7 as it's not available in conda-forge anymore\n if [[ \"$version\" == \"2.7\" ]]; then\n echo \"Skipping Python 2.7 as it's no longer supported\"\n continue\n fi\n\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy>=1.16\" \"scipy>=1.5\" cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies for the 2017 commit\n micromamba run -n \"asv_${version}\" pip install 
\"cython>=0.16\" \"biopython>=1.71\" \\\n \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"six>=1.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \\\n \"mock>=2.0.0\" \"psutil>=4.0.0\" \"fasteners>=0.12.0\" \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \"packaging>=20.0\" \"pytest>=3.3.0\" \"pytest-xdist>=1.4.0\" \"pytest-cov>=2.5.1\"\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # Use older build approach appropriate for 2017 commit\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need 
to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha='ff7ffa10901e2df2be12c3d3dd78e4e0a262e90e', commit_date=1614816697.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with specific version constraints\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13\" \"numpy>=1.16.0\" 
\"biopython>=1.74\" \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.9.3\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \\\n \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode with explicit numpy requirement\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='mdanalysis', repo='mdanalysis', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"asv_${version}\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n fi\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='modin-project', repo='modin', sha='be3e716107a185961fc209c343b0feefe0fb9751', commit_date=1684841207.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge 
\\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='modin-project', repo='modin', sha='c5aac3ef99d14305ea9a130e14155fc37495e199', commit_date=1608304159.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='napari', repo='napari', sha='3b6800763f97452ccf8230abf5a65fd6beedd247', commit_date=1606539287.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='napari', repo='napari', sha='dfeefb43af6538dd1e5ad7820128dfc844dc54b1', commit_date=1723973799.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='networkx', repo='networkx', sha='1071e14b81baaa4f0becc1849e85839ae8c671d9', commit_date=1716269137.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='networkx', repo='networkx', sha='81df24ce59b5b4fddfa65cd0a57db96748bba904', commit_date=1745208237.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='newton-physics', repo='newton', sha='5b18850fd8243e4c707b596880c01c1966e5168e', commit_date=1753825967.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='newton-physics', repo='newton', sha='cd07ab2c989df6392253a77e82333ec57a433e94', commit_date=1751556054.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nilearn', repo='nilearn', sha='6c1a76e37cf1c0dd6b800271cb3994f3efd38d07', commit_date=1744125996.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nilearn', repo='nilearn', sha='73fe9520ea705056f89b1cd5982947de13d515a0', commit_date=1754650581.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='numpy', repo='numpy', sha='4092a9e160cc247a4a45724579a0c829733688ca', commit_date=1459109632.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='numpy', repo='numpy', sha='9c3f0bb9955d530d43487f2ab800c765c83a3ea7', commit_date=1716460609.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='5495dc762dae2f09b648588d0f979e03ea3ef88b', commit_date=1741386626.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --verbose --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='626fc946dcfe2150b6aed956c57e89ec907ca44a', commit_date=1746035128.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Build and install WARP with CUDA support\n CUDA_PATH=/usr/local/cuda micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='9958a89058d16e7ac634c46b37d9aad6c14b3f10', commit_date=1740864850.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install torch cuda-python\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" python build_lib.py\n # Now install in editable mode\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='a447d70c372b4dbe1b574ebf587c51c9742272db', commit_date=1748714623.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build steps\n micromamba run -n \"asv_${version}\" pip install warp-lang\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Now try the editable install\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='a81f7e773f2905e06fe52262002c2e34a5daa4d8', commit_date=1743362346.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy cmake ninja pytest\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='d641e89a288746c380ef9b4871f45b0d862fd69e', commit_date=1755703901.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific dependencies and build requirements\n micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n # First run build_lib.py to generate required libraries\n cd ${ROOT_PATH}\n # Add missing climits header to fix build error\n sed -i '1i\\#include ' warp/native/bvh.cpp\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Then install WARP without CUDA support since error suggests basic build issues first\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha='dc693d89d5b85ac7e72c7f4e226eb58a5d54131f', commit_date=1751384285.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Ensure the necessary libraries are built before attempting to install the package\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='optuna', repo='optuna', sha='445048a74c9090e60a82a49605044cc42727642a', commit_date=1650874136.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='optuna', repo='optuna', sha='c634449ebbd2160ee44a1845d1efd6c20ee200ae', commit_date=1714538588.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pandas-dev', repo='pandas', sha='2f4c93e8322775a0bb06429a02429b95ba6abb26', commit_date=1698253642.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pandas-dev', repo='pandas', sha='94a8af55b703fbaea19da9902a9790c7b93dc0ad', commit_date=1686591905.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='posthog', repo='posthog', sha='16075ff5c3671587db9e6a6a3ed396058d0f413b', commit_date=1733419912.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the pyproject.toml issue by ensuring the 'version' field is present\n if ! 
grep -q \"version\" \"${ROOT_PATH}/pyproject.toml\"; then\n echo \"version = '0.1.0'\" >> \"${ROOT_PATH}/pyproject.toml\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='posthog', repo='posthog', sha='3578a0c1c2b6f4425dc0fddf31d3d256bbf3fc87', commit_date=1655908403.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pvlib', repo='pvlib-python', sha='3692427bef155a32eac525fe965ed8d407a7846e', commit_date=1660774705.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pvlib', repo='pvlib-python', sha='b8c56c5e725ed12f15342c5336f71d52ec8008ce', commit_date=1749300951.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pybamm-team', repo='pybamm', sha='b1fc5950f0d8e5c8e104e00573fdff5561818014', commit_date=1723152711.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pybamm-team', repo='pybamm', sha='e1f52ffcf9811bb7d5046af47c48a2291bfd50b8', commit_date=1653925577.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --constraint \"<3.10,>=3.7\"\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='bottleneck', sha='c5356daccdab4afc293f56d4b4ff47c154be5bcd', commit_date=1716493787.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='bottleneck', sha='dc01fad42713181b1f2bb13a965eb0651d1308b6', commit_date=1729241092.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --ignore-requires-python\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git 
asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='xarray', sha='4cbb7cbd86af1ccfe2b3b98f0e36a410f86d77ef', commit_date=1523669869.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydata', repo='xarray', sha='dd6222f01a476caa96630e26d5b02fad6777a886', commit_date=1747916222.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pydicom', sha='50cd981a068c74b01d854c6cac9bb897fe0b74a9', commit_date=1726970247.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pydicom', sha='87266d96add6a6cccaa3032bbc96b0e3009c6dea', commit_date=1690047796.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c 
conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pynetdicom', sha='1b701e898b489d561884d20ad78920607a6d1df0', commit_date=1563786471.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pydicom', repo='pynetdicom', sha='bb1f9d164d5c408fc28e02f924b3821b92cb45ad', commit_date=1555925288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pymc-devs', repo='pymc', sha='6360b005fc610d0505f84885743215a3e09f046e', commit_date=1614035911.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pymc-devs', repo='pymc', sha='a06081e1e9649bd56e3528cb96380efdf6bb2dc0', commit_date=1710322397.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pysal', repo='momepy', sha='6467ae26e8bfca9ba91e7795ab7899aaf89c576c', commit_date=1604013921.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pysal', repo='momepy', sha='7619f2f760d9027434369114a49150e3d3a483fb', commit_date=1603224289.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-adaptive', repo='adaptive', sha='50fae4341c53439f57fcea63346ba3581bd187d4', commit_date=1665457361.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-adaptive', repo='adaptive', sha='a9bb7f612717000dd2cf6899d8ebbf479807f6f5', commit_date=1550239213.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='0422c82a80b3ec0dc7fcbc69562f99e35358ee80', commit_date=1680293750.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='283f5e7480a7c39f0e11abe63e3c1ecd8b5d8911', commit_date=1616243491.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file to fix the invalid version error and setup.cfg\n mkdir -p \"${ROOT_PATH}/control\"\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n echo \"[metadata]\nversion = 0.0.0.dev0\" > \"${ROOT_PATH}/setup.cfg\"\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n\n # Try installing in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: 
\"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu 
\"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='2ce4bbd983ce00aa2998bce00c7c161ff7c0f1d5', commit_date=1640530701.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file since setup.py fails due to invalid version\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', 
sha='4ef15c4e95ec73cf5fc4d571be103e67b00caadf', commit_date=1647713524.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required build dependencies\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib setuptools wheel\n\n # Fix the version in setup.py before installing\n sed -i 's/version='\"'\"'dev'\"'\"'/version='\"'\"'0.0.0.dev0'\"'\"'/' \"${ROOT_PATH}/setup.py\"\n \n # Build and install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', 
sha='82f3fe343422289f076d6883a2448d169606f821', commit_date=1701474288.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='a042895507367a5d001af7d3febfd8f386497554', commit_date=1739343810.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='a111b03e651d7c1828d264c1b143d9ccc9030b3f', commit_date=1640969033.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='abeb0e46a3d56c98b4534f73202a5a7ef5a0af87', 
commit_date=1751727883.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='c3c659638fb22bde11e40868f80f540060c50b40', commit_date=1616196419.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha='f7d18f17bf90bfb99a06648982b22d1e4af6ccd2', 
commit_date=1686374157.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { + "building_data": "#!/usr/bin/env 
bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-hyper', repo='h11', sha='80805f06e5859692a9dcc32484b2745b7f215a8a', commit_date=1597311658.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='python-hyper', repo='h11', sha='d64468627a4adeb4140e1480a836c85ba903a2c6', commit_date=1522821575.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pytroll', repo='satpy', sha='94fc4f7749bc2a27f76c7a16a7289037d41120f2', commit_date=1644305622.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pytroll', repo='satpy', sha='aa7f0dd616a973eb2de0e5b77a9ec51d08cc601c', commit_date=1659722497.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pywavelets', repo='pywt', sha='21a30d2af5aca2b3c5f827aa407cb549e2c99fb9', commit_date=1551150162.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --use-pep517\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='pywavelets', repo='pywt', sha='74b44217a66199fa2e0f8e036955fc00f5cbc21a', commit_date=1708613848.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='qiskit', repo='qiskit', sha='023cbd4ec646fc81e0434b6de434bb477ad94979', commit_date=1755506488.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='qiskit', repo='qiskit', sha='b12e9ec3cff020983e3dde9b16f5ccc4fd0f4963', commit_date=1715792171.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython setuptools-rust\n micromamba run -n \"asv_${version}\" pip install rustup\n micromamba run -n \"asv_${version}\" rustup toolchain install stable\n micromamba run -n \"asv_${version}\" rustup default stable\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='quantumlib', repo='cirq', sha='01ae51eebf3b18a5cbee9fc0c697d4e1511c07f2', commit_date=1640302944.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH}\n }\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='quantumlib', repo='cirq', sha='1a75d9faee3b78765bb4badcf73e3d3e72a3ca2a', commit_date=1744652301.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='rockhopper-technologies', repo='enlighten', sha='d239fa5496a6c342b85343d53a4c16d8db9a87a5', commit_date=1698502059.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean 
--all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-image', repo='scikit-image', sha='0ff35b21293405e9922e44b9dda3818db960b87e', commit_date=1674543103.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-image', repo='scikit-image', sha='c7479c1d7430020a9ee9d92f25a1f0c33e36a7c1', commit_date=1597584715.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='044f1b00a62c9083ce3212a3e69046c9afac0de6', commit_date=1662470783.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='04860335c82d557e663b4cfa218663d1c7bf65fd', commit_date=1689974588.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='05ce8141bc71ad21e55be4d1b3f6609f65e91e49', commit_date=1603277025.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='06e566eb86cfd8c6107cf3bc2b477c97b80002a3', commit_date=1705578508.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='08b6157b0e18480569a5cc08efd44dabad9e60ce', commit_date=1701071115.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='092caed407f3b60de7677d4353bfe0db20a2faab', commit_date=1682603301.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='0937b4ab48136eb161ead4abd4806d0708b1bb4c', commit_date=1607961058.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='0c65bbfe8ce816a181780d2a249c94dd653e115a', commit_date=1642433763.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='0c74b8b7d5cdb60dc3a3240cdb36af40b9f40288', commit_date=1615733031.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='0de3b0d1eaacee9f7b15cabc05752cba945c7621', commit_date=1644500459.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1045d16ec13b1cab7878e7555538573d1884aad3', commit_date=1614793397.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='119b837a43d939ec02cf2aeba5bd203f8ebab4c7', commit_date=1649335379.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1415a2890b0451d80feef2d81e921a15d2b9d680', commit_date=1685431571.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='15cb8695a27eb8d4dc281ac3c937e12db8b5a6c1', commit_date=1604221237.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle the multiple packages error\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Create setup.cfg to explicitly specify packages\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\npackage_dir =\n = .\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='16625450b58f555dc3955d223f0c3b64a5686984', commit_date=1652277602.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
#### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='191f96908d6bbb46cf7293fb0ac1299f1e8b783d', commit_date=1719904631.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='1a78993217b52745d63a3495a819efd7f1b0530a', commit_date=1691676945.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='1bb0306a1309f9a57d8c652dec731a95cbd0052b', commit_date=1610422145.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to avoid package discovery issues\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='1d1aadd0711b87d2a11c80aad15df6f8cf156712', commit_date=1642210241.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='1db03ce68be362baa12330ae3f42b9673863fa52', commit_date=1626800410.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='210740408a732940430047fe9437c2193735573f', commit_date=1719586131.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='226da0d7c458816776549c2580abaa4782dc4c48', commit_date=1637400914.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='28831879f2b5a8f623623735480399735c1bb742', commit_date=1755578702.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2e213c618841f3635885bab034606512c40a7fd4', commit_date=1646246849.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='320b4c61f97fec3facc3c4c2b4cf9351d3425b44', commit_date=1596283836.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='32c5d05cbd7551fd983a250945013239e0e5cb94', commit_date=1631705680.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='408f561b87f9955e92619cbf924d595a2655344f', commit_date=1678175921.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='432778464cbffc8ca675c1df786c31f8c23fc62c', commit_date=1642715056.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='45a817933ef51a24f0c5863c1026b4fe664b26fa', commit_date=1608647213.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to handle the multiple packages issue\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='4685cf624582cbc9a35d646f239347e54db798dc', commit_date=1652472968.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='49d26cb63fefe43c9b310136e4f2c172d8c433cb', commit_date=1599140563.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools==60.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and environment variables\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n\n # Install scikit-learn in development mode with specific build settings\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: 
\"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba 
run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='4bc61a09eac44a86758c6a02a2b47f912a696d3b', commit_date=1719575535.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='4e44edebf9e811c718c2842b65db2eb41ba01786', commit_date=1723709827.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='52d93e141a5d874bd288f15cc1d8990f09721aad', commit_date=1754304060.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='55a65a2fa5653257225d7e184da3d0c00ff852b1', commit_date=1695213631.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='584d413fec25fb5c38f06c1fe88e652111395330', commit_date=1675930888.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='598045569c8f96fb345059f5316ea8903d374ff4', commit_date=1615476313.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='5a332e77a10a44107276843d8532ef79f239c8f3', commit_date=1681854133.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5a850eb044ca07f1f3bcb1b284116d6f2d37df1b', commit_date=1657115862.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5b46d01f8d5015114644b91ce88ee4bc4fa5386d', commit_date=1680769691.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5c4e9a0fd82dd096bbdf78b69c264a741c768a86', commit_date=1690911539.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5ffec3233034e0413f548380d4a22f4e0eecae94', commit_date=1678722797.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='63a1a31a17f9bd9cdf617b2cf04bfaf2f32f0a17', commit_date=1639082235.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='64d54483edfa55ab44d836f9b08ff1bd38f7f6bb', commit_date=1627659978.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='6595229d116b128c5b36f204dc941f69e14abc7f', commit_date=1718288797.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='673f6259f3fb7bd2a057b1889e23b280fe638998', commit_date=1612389138.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='67ca4dda1d61c9ad95ed68b04cb40da2c822e960', commit_date=1678114713.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='687e84a126965b4179b02d86041a9e997eba87c9', commit_date=1751036214.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='691b00f4b7d169d38cc46cf14668a5029b2df8eb', commit_date=1728910531.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='6d7d0f275db08ca97e7ce9765e5e8f0604e490dd', commit_date=1641981733.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='6f91cbebe5c439d5712860315616b70cd2ca9f87', commit_date=1633437528.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='701537ecca85a333449814c82ac2b78db5f534a8', commit_date=1682379515.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='70ca21f106b603b611da73012c9ade7cd8e438b8', commit_date=1713791446.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='767e9ae7e4fec8bea36c0433ab42f500aacfde64', commit_date=1651223539.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='769da3d51feef52b97b8129bf4700cf088a247b2', commit_date=1613120619.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" wheel\n\n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='76c28285d3d3eb6a2834b7d1db01e296187c60b8', commit_date=1677233852.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7798fd829d0eb3637da17cc5cb359bf52efa551f', commit_date=1630429058.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='7a2f5ca3a8478333f194a085b0c3635d75fcdf4d', commit_date=1678442780.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7aabe53e730947df0f6f1f85d640e6daea5bfc9f', commit_date=1634742992.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7af0a18996efb10fcbcdb15c7c132d2eb36be736', commit_date=1687508727.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7b715111bff01e836fcd3413851381c6a1057ca4', commit_date=1624465784.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7c835d550c1dcaf44938b1c285db017a773d7dba', commit_date=1662054353.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='7f1d4d05064a160e19f786bfbac8996cf0ecac5d', commit_date=1707518612.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='80ebe21ec280892df98a02d8fdd61cbf3988ccd6', commit_date=1638310769.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='830864629e21509980a9c3904c9bb7bf2be8fec5', commit_date=1655213679.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8525ba5d3c3b5423a5599e654ce73b931882a434', commit_date=1754632277.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='863c552c448118249563f0e709ea83a1a9b2fc7f', commit_date=1612010007.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='871892cef9bc70224233fdf2140c896874c07b57', commit_date=1659000389.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='88c2db24bd3efb631372aa971270d6cb690d914d', commit_date=1726476355.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='897c0c570511be4b7912a335052ed479ac5ca1f3', commit_date=1705781316.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a71b840d3d7f6e5db9f9faf3b6c44f8ed6a3850', commit_date=1705345976.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a7684705f636a8dfcde8e2239d2e0bcd624ac54', commit_date=1647426404.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8ad7c3f02daae525ee83231fbd33fb65e8e05288', commit_date=1633621378.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='8eef0e767c4bdd2fdb83f51b162afa32386d5973', commit_date=1692883694.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='9590c07128d3bad5978f08eeb34613d347b96e38', commit_date=1719499549.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='961afc72e0222cb108b77b68c145ea4424f089da', commit_date=1751880029.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='99410b1bdea296a0df48026aaee85472bf3cb7cf', commit_date=1625818419.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='99562100e941f0972a5a65484ff80f407eeb5137', commit_date=1674572593.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='9c9c8582dff9f4563aa130ef89f155bad0051493', commit_date=1668796144.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='9e38cd00d032f777312e639477f1f52f3ea4b3b7', commit_date=1705585714.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a09a62eda27720a0cb949ea24b1e21d358f95176', commit_date=1676040745.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a47d569e670fd4102af37c3165c9b1ddf6fd3005', commit_date=1652372475.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a67ebbebc173007735e62eef7878c08435d28d89', commit_date=1718987804.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a85b14d4799ba7c4e13e0e942e599f8077dc182e', commit_date=1679350355.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='a8b1905e8f977fcd4d6a348678bb1e82ed9b3310', commit_date=1606807943.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='aa2131f9bdcfa7ff0dacfd6a47c207cbb68a49fa', commit_date=1751370298.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='abbeacc2daee2b213274924a5a4ffe6cbafb0627', commit_date=1651693256.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='aca8f20db461ca0dd70b02b6a1f41b957b2b12ee', commit_date=1665069106.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ad91259f20529306efe445f5a1da4dccc8c81b5a', commit_date=1663256210.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b1202af3b379e698539a2719f2b1e28706ce5388', commit_date=1638654791.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b3aea0053dadcb67adfc39a90c70ffca607a534f', commit_date=1643205359.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b5d55b4fd19ca97d68e4e34e5822865b0a8e90d2', commit_date=1651487470.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b609562c610822ad4b3c11a9e7a22710aba438af', commit_date=1637744681.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='b6b6f63ebefe16403d11e8a0a2281b6e2a811933', commit_date=1678791874.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b8229daafee0e50690d4b8447f93cf1069ba6880', commit_date=1701274890.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='bc7cd3189bc817545791071515693445e1e271db', commit_date=1617352203.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='bf0886bae0ccbc8c5d285b6e2affe7e40474f970', commit_date=1619532370.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c0eb3d37244cc4bf35b82e18bff37320e198b038', commit_date=1670930060.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c217527af5744b9d0db8761c1e3667552312e5e7', commit_date=1652946509.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c300a8f2178fcae847f82ad548fe9452f2ba8bbb', commit_date=1658415495.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c47205fb7d45de50de4afa9760d974e754f103e1', commit_date=1707735651.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c63b21ec309f742defd56033eadfc8f7bf5b510b', commit_date=1711607317.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c6ad7361c8fc68188b83070aa0b6b797058c06fa', commit_date=1646214356.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c9138537790cc0fa352968eed927433fe17ee17c', commit_date=1701967415.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c9525d1600ecd526b9b98e275fc1b85782c25dea', commit_date=1634072165.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='c9f9b041758c3fa5fdf74b15995a3e3607b0ad5a', commit_date=1737104589.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='caaa1f52a0632294bf951a9283d015f7b5dd5dd5', commit_date=1732650609.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='cbe8648c33b94bd919c35f4d1e2ae1c4432d9749', commit_date=1748364732.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='d5901462551283b689284e582152666faf0dc1da', commit_date=1676911719.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d5df806c9715443f5dc7de9023a1b7aa2045eae4', commit_date=1677234005.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d85b1d3302a3ff45179a5826a747e8ee2562f143', commit_date=1674489554.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d8d5637cfe372dd353dfc9f79dbb63c3189a9ecc', commit_date=1644836117.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='d92c76986ac6553ce8e0fe2c1bbaea500c105cc7', commit_date=1679480310.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dc1ea2751e8f4e18f61c7e6d767cf42c6e636256', commit_date=1608485758.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dc580a8ef5ee2a8aea80498388690e2213118efd', commit_date=1670501069.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='de67a4420f1713058070802ad593cbcd2ee2d5f3', commit_date=1677582108.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='df692c03c1a6003878c6fc4d2f9f222d304dcee3', commit_date=1649449476.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='dfaef0c6c3aef0d00c72573728c90c1d542e2957', commit_date=1657123469.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='dfda968f1d0b3b1ecaeb4125d3e903416eaf18ec', commit_date=1678100532.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e113897235feaf309eaaed24001ca96f3608602f', commit_date=1648574496.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for potential warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: 
\"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', 
repo='scikit-learn', sha='e1db2a8173ca37e561cdfa4384481501c4d50868', commit_date=1644639631.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e1e8c66e05dd638ae785855bfb637e0180aea99c', commit_date=1642748755.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e411c29625e66f7e440f1acce4069e01201cf122', commit_date=1672782103.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='e64714637d8cc9f4724ae21ea500e4bdc57b0a39', commit_date=1629207428.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='e7ae63f784c5f85af41cf8f346d194775f01f333', commit_date=1694440694.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='eae3f294d3ba8ae636730537faef4cdd612083ff', commit_date=1678119642.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eb2920766d7b2ffb04359a1dc8b6c611960931b7', commit_date=1725568507.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='eb85684feb0505694e66365ba9f4d10a409f8f0b', commit_date=1697017427.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ee524f455dbf0285f7b121a08f1e9613a518abcf', commit_date=1617906457.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='eecde00c7a706546271ff40d7d492b5f27046d2b', commit_date=1619516333.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ef200eb16813f4e579f3a4e6cd4603e16f72f5a8', commit_date=1680030341.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='ef82b778ecaeee11d6bfd005f59e882410d330b6', commit_date=1751882162.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f1d3417b086550be670cbfbb5b3c1760ac99203f', commit_date=1646068982.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f4ed8ef5e4498c9de2ff4b713c1695d6f312ffba', commit_date=1733748660.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f812e2a27619650463cb12d765f1b443b47c0828', commit_date=1628181136.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + 
"Task(owner='scikit-learn', repo='scikit-learn', sha='f86f41d80bff882689fc16bd7da1fef4a805b464', commit_date=1695653805.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f965fcc0634e47b7230e120850cf7bb4efeb96e7', commit_date=1674829022.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='f96ce586eecb361d53b192ea3b44098d1bd49a77', commit_date=1637843007.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha='fba028b07ed2b4e52dd3719dad0d990837bde28c', commit_date=1733159260.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='fe08016877e8bd715816cf9fbfb1fb697c3446d2', commit_date=1754300286.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='fee76cc5405c01e283a3b079dcb865f3017d5007', commit_date=1705008338.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ff9344f3d8d11d38fa3a2497199113e5bac9537c', commit_date=1666642605.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn', repo='scikit-learn', sha=None, 
commit_date=0.0)": { + "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='57086e91b65b88a95c89449aa501ff68a61dc39a', commit_date=1563459886.0)": { 
+ "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends 
\\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='faa240fd7469176036a91430ae6a0a45e627c94a', commit_date=1531145592.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN 
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scipy', repo='scipy', sha='83dbd97a76af8621dd0228a797f5207bed094c23', commit_date=1679643125.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran pybind11\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge openblas\n micromamba run -n \"asv_${version}\" git submodule update --init\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scipy', repo='scipy', sha='b919b4aa67a541b1fef91820a4e94156f7dd36d2', commit_date=1731196689.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install pybind11\n micromamba run -n \"asv_${version}\" pip install openblas\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='cartopy', sha='9a4d894d9adab3b3a8d9cee6299581ba0ef9ec20', commit_date=1662748176.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='cartopy', sha='d9825f18dc6a70b5b4ef6bc5bf48d8025eef1e8e', commit_date=1581379933.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --upgrade setuptools\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p 
$MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='iris', sha='33deead5846b37019902ba067c87e710e55ff6e6', commit_date=1650551816.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scitools', repo='iris', sha='b2ce2a34e2eef7e3d6203c77ada7ed4ce89e3145', commit_date=1573652360.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='anndata', sha='2712af6efcf2d4356f4185a10e92328168710d9f', commit_date=1680623010.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython flit-core setuptools_scm\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='anndata', sha='df213f659f0e9eadfcab4af48ee98de7145252a7', commit_date=1733842403.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='scanpy', sha='7f3f89ac02e924a3a6d55c31730cfaf23b0b4223', commit_date=1744636041.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='scverse', repo='scanpy', sha='ad657edfb52e9957b9a93b3a16fc8a87852f3f09', commit_date=1718709475.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='shapely', repo='shapely', sha='3c3a83986ac5bf434e0ca6b7bd16571a1ddac0a4', commit_date=1696785164.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='shapely', repo='shapely', sha='ff2ceac81cca6240c459eba5a5ce07084fe25ad2', commit_date=1662401853.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sourmash-bio', repo='sourmash', sha='9230fce7479c547c96dabe0c1a749a71a4b9e77c', commit_date=1650894889.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sourmash-bio', repo='sourmash', sha='d2d638b645048cc93377fb9aff8a3be8c937b8b3', commit_date=1613310154.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install 
git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge rust\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='spotify', repo='voyager', sha='49416c5db539a40adba2588bfe19dc8736db01b2', commit_date=1734118555.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" 
]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='spotify', repo='voyager', sha='88cfc468617fde8360ac6db7e71bc578ba49ed16', commit_date=1725990271.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"${ROOT_PATH}\"\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}.\"\n exit 1\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sunpy', repo='sunpy', sha='01ea7b5e2760c24e08386f95fd5fd1c0f73da47f', commit_date=1739035442.0)": { + "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='sunpy', repo='sunpy', sha='770f95dbfb033ffacc7172a3cff5158b09f7efe4', commit_date=1651836877.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython extension-helpers\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv 
pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='textualize', repo='rich', sha='1de94713811101702b8fcf283c64d1a5de5a8213', commit_date=1657547667.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='textualize', repo='rich', sha='cb92947610614e04116f82cb001ed44dda1699fb', commit_date=1647342081.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='tqdm', repo='tqdm', sha='0f823e79f303b4a93ef1381badb1e65757e5070f', commit_date=1603641812.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='tqdm', repo='tqdm', sha='42761473f9edf276937cc3a28a6fcabc59f5f97d', commit_date=1575632008.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='xorbitsai', repo='xorbits', sha='aee883be1dcd4cbbd43d67794932d5c858fcffe2', commit_date=1676955703.0)": { + "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to non-editable if it fails\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + }, + "Task(owner='xorbitsai', repo='xorbits', sha='ebc391fe0fa55599c3197c52408bd43a4bd9476f', commit_date=1695401335.0)": { + 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml if setup.py is not found\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n else\n echo \"Neither 'setup.py' nor 
'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n fi\ndone", + "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + } + }, + "version": 1 +} diff --git a/scratch/scripts/initialize_context_registry.py b/scratch/scripts/initialize_context_registry.py index 
adbcef7..63d1e54 100644 --- a/scratch/scripts/initialize_context_registry.py +++ b/scratch/scripts/initialize_context_registry.py @@ -406,4 +406,4 @@ # ), # ) -CONTEXT_REGISTRY.save_to_file(Path("scratch/context_registry.json")) +CONTEXT_REGISTRY.save_to_file(Path("scratch/context_registry_init.json")) diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index b212e86..e46b2e9 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -49,6 +49,7 @@ def parse_args() -> argparse.Namespace: help="Directory where the results will be stored.", ) parser.add_argument("--max-workers", type=int, default=8, help="Max parallel builds/runs.") + parser.add_argument("--max-steps", type=int, default=5, help="Number of ReACT steps to use.") parser.add_argument("--max-attempts", type=int, default=3, help="Max attempts per task (build+run).") parser.add_argument("--build-timeout", type=int, default=20 * 60, help="Seconds before aborting a docker build.") parser.add_argument("--run-timeout", type=int, default=15 * 60, help="Seconds before aborting asv run.") diff --git a/src/datasmith/agents/build_agent.py b/src/datasmith/agents/build_agent.py new file mode 100644 index 0000000..8b3e3ae --- /dev/null +++ b/src/datasmith/agents/build_agent.py @@ -0,0 +1,245 @@ +# # file: reactive_docker_builder.py +# from __future__ import annotations + +# import os +# import json +# import shlex +# from typing import Sequence, Dict, Any + +# # --- LangChain / LLMs --- +# from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder, PromptTemplate +# from langchain_core.tools import StructuredTool +# from langchain.agents import AgentExecutor, create_react_agent +# from langchain_core.language_models import BaseLanguageModel + +# from docker.models.containers import Container + +# # Example LLM import (swap to your provider, e.g., langchain_openai, langchain_anthropic, etc.) 
+# # from langchain_openai import ChatOpenAI + +# # Optional: online docs tool (enabled if TAVILY_API_KEY is set) +# _HAVE_TAVILY = False + +# # Docker SDK: you provide an already-running docker.models.containers.Container +# # Docs for exec_run demux/stream behavior: https://docker-py.readthedocs.io/en/stable/user_guides/multiplex.html +# # and general container API: https://docker-py.readthedocs.io/en/stable/containers.html + + +# # ----------------------------- +# # Docker exec helpers / tools +# # ----------------------------- +# def _exec_in_container(container: Container, cmd: str, workdir: str | None = None, env: dict[str, str] | None = None, +# stream: bool = True, demux: bool = True, timeout: int | None = None) -> dict[str, Any]: +# """ +# Execute a non-interactive bash command inside the container with proper demuxing of stdout/stderr. +# Returns a dict containing exit_code, stdout, stderr. +# """ +# # Use bash -lc so users can chain commands, source envs, etc. +# quoted = shlex.quote(cmd) +# final_cmd = f"/bin/bash -lc {quoted}" + +# # NOTE: container.exec_run supports demux/stream; see docs. +# # We keep TTY disabled so demux works predictably. 
+# import IPython; IPython.embed() +# res = container.exec_run( +# final_cmd, +# stream=stream, +# demux=demux, +# environment=env, +# workdir=workdir, +# tty=False, +# ) + +# # Aggregate streamed output if requested +# stdout_chunks, stderr_chunks = [], [] +# if stream: +# for out_tup in res.output: +# if demux: +# out, err = out_tup +# if out: +# stdout_chunks.append(out.decode("utf-8", "replace")) +# if err: +# stderr_chunks.append(err.decode("utf-8", "replace")) +# else: +# # single stream +# stdout_chunks.append(out_tup.decode("utf-8", "replace")) +# else: +# if demux: +# out, err = res.output # (stdout, stderr) +# if out: +# stdout_chunks.append(out.decode("utf-8", "replace")) +# if err: +# stderr_chunks.append(err.decode("utf-8", "replace")) +# else: +# stdout_chunks.append(res.output.decode("utf-8", "replace")) + +# stdout = "".join(stdout_chunks) +# stderr = "".join(stderr_chunks) +# exit_code = getattr(res, "exit_code", None) + +# return {"exit_code": exit_code, "stdout": stdout, "stderr": stderr} + + +# def _truncate(s: str, limit: int = 4000) -> str: +# if s is None: +# return "" +# return s if len(s) <= limit else (s[:limit] + f"\n...[truncated {len(s)-limit} chars]...") + + +# def make_bash_tool(container, name: str = "bash") -> StructuredTool: +# """ +# Create a LangChain StructuredTool that executes bash commands in the given Docker container. +# Important: Each call is a fresh non-interactive shell; 'cd' does not persist across calls. +# Always chain with 'cd path && do_the_thing'. +# """ +# def _run_bash(cmd: str) -> str: +# """ +# Run a bash command inside the connected Docker container and return a JSON payload: +# {"exit_code": int, "stdout": "...", "stderr": "..."} +# Keep outputs trimmed to avoid token bloat. 
+# """ +# result = _exec_in_container(container, cmd, stream=True, demux=True) +# return json.dumps({ +# "exit_code": result["exit_code"], +# "stdout": _truncate(result["stdout"]), +# "stderr": _truncate(result["stderr"]) +# }) + +# return StructuredTool.from_function( +# name=name, +# func=_run_bash, +# description=( +# "Execute a non-interactive bash command inside the target Docker container. " +# "Use this to run build tools, pip, python, make, etc. " +# "IMPORTANT: Shell state is not persistent across calls; if you need to run in a directory, " +# "prefix commands with 'cd && ...'. Return value is JSON with keys exit_code, stdout, stderr." +# ), +# ) + + +# def import_check(container, package: str) -> Dict[str, Any]: +# """ +# Validate that 'python -c \"import {package}\"' works inside the container. +# Returns {ok: bool, exit_code: int, stdout: str, stderr: str} +# """ +# # Prefer a clean check that prints any exception for the agent/user to see. +# cmd = f'python -c "import importlib,sys; ' \ +# f'pkg={json.dumps(package)}; ' \ +# f'print(f\'checking import {{pkg}}...\'); ' \ +# f'importlib.import_module(pkg)"' +# res = _exec_in_container(container, cmd, stream=False, demux=True) +# ok = (res["exit_code"] == 0) +# stdout = res.get("stdout") or "" +# stderr = res.get("stderr") or "" +# return {"ok": ok, "exit_code": res["exit_code"], "stdout": stdout, "stderr": stderr} + + +# # --------------------------------- +# # Agent construction & orchestration +# # --------------------------------- +# def make_react_prompt() -> PromptTemplate: +# """ +# ReAct prompt template compatible with langchain.agents.create_react_agent. +# (Per LangChain docs, the prompt must expose {tools}, {tool_names}, and {agent_scratchpad}.) +# """ +# template = """You are a careful build engineer working **inside a Docker container**. + +# Goal: +# - {goal} + +# Hard stop condition: +# - The task is finished when `python -c "import {package}"` returns exit code 0. 
+ +# Environment constraints: +# - Each tool call runs in a fresh, non-interactive shell; **`cd` does not persist**. If you need a working dir, do `cd && ...`. +# - Prefer standards-based Python packaging (PEP 517/518) with `python -m build` or `pip install .` depending on the project layout. +# - Keep commands idempotent when possible. +# - Be explicit; show the entire command you run. + +# You have access to these tools: +# {tools} + +# Use this strict format: +# Question: restate the next concrete subtask +# Thought: explain briefly what you'll do +# Action: the action to take, must be one of [{tool_names}] +# Action Input: exact input for the action +# Observation: the result +# ...(repeat Thought/Action/Action Input/Observation as needed) +# Thought: I now know the final answer +# Final Answer: a short summary of what was done and why it should satisfy the stop condition + +# Begin! + +# Question: {input} +# Thought:{agent_scratchpad} +# """ +# return PromptTemplate.from_template(template) + + +# def build_react_agent( +# llm: BaseLanguageModel, +# container: Container, +# max_iterations: int = 20, +# ) -> AgentExecutor: +# """ +# Construct a ReAct agent wired to the Docker bash tool and (optionally) a web-docs search tool. +# """ +# tools: list = [make_bash_tool(container)] +# prompt = make_react_prompt() +# agent = create_react_agent(llm, tools, prompt) # (Legacy LangChain ReAct). See docs. +# # NOTE: AgentExecutor supports guards like max_iterations and early_stopping_method. +# executor = AgentExecutor( +# agent=agent, +# tools=tools, +# verbose=True, +# handle_parsing_errors=True, +# max_iterations=max_iterations, +# early_stopping_method="generate", +# return_intermediate_steps=False, +# ) +# return executor + + +# def run_build_agent( +# agent: AgentExecutor, +# problem_description: str, +# package: str, +# ) -> Dict[str, Any]: +# """ +# Run the agent once (internally it may take many tool-calls up to max_iter). 
+# After it finishes, probe the stop condition inside the container. + +# Returns: { +# "agent_output": str, +# "import_check": {"ok": bool, "exit_code": int, "stdout": str, "stderr": str} +# } +# """ +# user_input = ( +# f"{problem_description}\n" +# f"Target package name: {package}\n" +# f"Remember: The task is finished when `python -c \"import {package}\"` succeeds." +# ) +# # Cap total internal steps safely +# result = agent.invoke( +# { +# "input": user_input, # your composed instructions +# "goal": problem_description, # <— add this +# "package": package, # <— and this +# }, +# config={"configurable": {"thread_id": "build-session-1"}}, +# ) +# # After the agent stops, verify the explicit finish condition: +# # (This ensures we don't consider 'done' unless the import actually works.) +# # We rely on the bash tool's statelessness—no residual shell state. +# # The import check uses python -c import; see Docker exec docs for behavior of exec sessions. +# executor_tools = getattr(agent, "tools", None) # not needed, just illustrative +# # The container reference is captured inside the bash tool; we re-check via helper: +# # (If you need to pass the container explicitly, expose it to this function too.) +# # Here, we assume you still have a reference (e.g., close over it or store globally). +# # For simplicity, require caller to call `import_check` directly and attach to this return. +# return { +# "agent_output": result.get("output", str(result)), +# # Caller should supply the container again for the check: +# # But we can't access it from AgentExecutor. Return a stub and instruct caller to call import_check(). 
+# } diff --git a/src/datasmith/agents/container_toolbox.py b/src/datasmith/agents/container_toolbox.py new file mode 100644 index 0000000..d219b1f --- /dev/null +++ b/src/datasmith/agents/container_toolbox.py @@ -0,0 +1,332 @@ +from __future__ import annotations + +import json +import logging +import os +import shlex +import textwrap +from dataclasses import dataclass +from typing import Callable + +import docker +from docker.models.containers import Container + +logger = logging.getLogger(__name__) + +_DEFAULT_KEEPALIVE_CMD = "trap : TERM INT; while :; do sleep 2147483647; done" + + +def _bash(cmd: str, *, timeout_s: int | None = None) -> tuple[list[str], dict]: + if timeout_s: + # timeout wraps the shell itself + return ["timeout", f"{int(timeout_s)}s", "/bin/bash", "-lc", cmd], {} + else: + return ["/bin/bash", "-lc", cmd], {} + + +@dataclass +class ExecResult: + rc: int + stdout: str + stderr: str + + +class PersistentContainer: + """ + Creates a long-lived container for interactive 'exec' operations. + The image is assumed to already contain the target repo at the desired commit. + """ + + def __init__( + self, + client: docker.DockerClient, + image: str, + *, + name: str | None = None, + workdir: str | None = None, + env: dict | None = None, + keepalive_cmd: str | None = None, + ) -> None: + self.client = client + self.image = image + self.name = name + self.workdir = workdir + self.env = env or {} + self.keepalive_cmd = keepalive_cmd or _DEFAULT_KEEPALIVE_CMD + self.container: Container | None = None + + def start(self) -> None: + if self.container is not None: + return + # Make the entrypoint /bin/bash to ensure we have a shell for exec. 
+ self.container = self.client.containers.run( + self.image, + command=["trap : TERM INT; while :; do sleep 2147483647; done"], + name=self.name, + working_dir=self.workdir, + environment=self.env, + stdin_open=False, + tty=False, + detach=True, + entrypoint=["/bin/bash", "-lc"], + ) + if self.container is None: + logger.warning("Failed to start container from image %s", self.image) + return + self.container.reload() + if self.container.status != "running": + logs = self.container.logs(tail=50).decode("utf-8", "replace") + raise RuntimeError(f"Container failed to stay up. Status={self.container.status}\n{logs}") + + def stop(self) -> None: + if not self.container: + return + try: + self.container.stop(timeout=3) + finally: + try: + self.container.remove(force=True) + finally: + self.container = None + + def exec(self, cmd: str, *, timeout_s: int | None = 30) -> ExecResult: + if not self.container: + raise RuntimeError("container not started") + args, _ = _bash(cmd, timeout_s=timeout_s) + exec_id = self.client.api.exec_create(self.container.id, args, stdout=True, stderr=True) + out = self.client.api.exec_start(exec_id, stream=False, demux=True) + if isinstance(out, tuple) and len(out) == 2: + stdout_bytes, stderr_bytes = out + else: + # Fallback for engines that don't demux + stdout_bytes, stderr_bytes = out, b"" + insp = self.client.api.exec_inspect(exec_id) + rc = insp.get("ExitCode", 1) + return ExecResult( + rc=rc, + stdout=(stdout_bytes or b"").decode("utf-8", errors="replace"), # pyright: ignore[reportAttributeAccessIssue] + stderr=(stderr_bytes or b"").decode("utf-8", errors="replace"), + ) + + # --- Higher-level helpers --- + + def find_repo_root(self) -> str | None: + """ + Heuristics to locate the repo root inside the container. + Tries git, then common roots, then a bounded 'find'. 
+ """ + # 1) git (fast if .git present) + res = self.exec("git rev-parse --show-toplevel || true") + if res.stdout.strip(): + return res.stdout.strip() + + # 2) check common mount points / conventional roots + candidates = ["/workspace", "/work", "/repo", "/project", "/src", "/opt/src", "/home"] + script = " || ".join([f"[ -e {shlex.quote(p)}/pyproject.toml ] && echo {shlex.quote(p)}" for p in candidates]) + res = self.exec(f"({script}) || true") + if res.stdout.strip(): + return res.stdout.strip() + + # 3) bounded search for pyproject/asv files to infer root + res = self.exec( + textwrap.dedent(""" + set -euo pipefail + root="" + for base in /workspace /work /repo /project /src /opt/src /home /; do + p=$(find "$base" -maxdepth 5 -type f \\( -name pyproject.toml -o -name asv.conf.json \\) 2>/dev/null | head -n1 || true) + if [ -n "$p" ]; then + root=$(dirname "$p"); echo "$root"; exit 0 + fi + done + true + """).strip(), + timeout_s=25, + ) + return res.stdout.strip() or None + + def list_tree(self, root: str, *, max_depth: int = 3, max_items: int = 500) -> list[str]: + cmd = ( + f"cd {shlex.quote(root)} 2>/dev/null && " + f'find . 
-maxdepth {int(max_depth)} -type f -print 2>/dev/null | sed "s|^\\./||" | head -n {int(max_items)}' + ) + res = self.exec(cmd, timeout_s=20) + return [ln for ln in res.stdout.splitlines() if ln.strip()] + + def read_file(self, path: str, *, max_bytes: int = 256_000) -> str: + # use Python for robust UTF-8 handling across environments + py = textwrap.dedent(f""" + import sys, os, io + p = {path!r} + try: + with open(p, 'rb') as f: + data = f.read({int(max_bytes)}) + sys.stdout.write(data.decode('utf-8', 'replace')) + except Exception as e: + sys.stdout.write("") + sys.stderr.write(str(e)) + """).strip() + res = self.exec(f'python - << "PY"\n{py}\nPY', timeout_s=20) + return res.stdout + + def infer_repo_facts(self, repo_root: str) -> dict: # noqa: C901 + """ + Extracts asv dir, pyproject/setup files, requirements/env files, package name candidates, + and python versions from asv.conf.json (if present). Portable across BusyBox/GNU find. + """ + scan_cmd = textwrap.dedent(f""" + set -e + base={shlex.quote(repo_root)} + [ -d "$base" ] || exit 0 + ( + cd "$base" + # print relative paths; strip leading ./ so Python sees clean relatives + find . 
-maxdepth 6 -type f \\( \ + -name 'asv.conf.json' -o -name 'pyproject.toml' -o -name 'setup.cfg' -o -name 'setup.py' -o \ + -name 'asv.*.json' -o -name 'requirements.txt' -o -name 'requirements-*.txt' -o \ + -name 'environment.yml' -o -name 'environment.yaml' \ + \\) -print | sed 's|^\\./||' + ) | sort -u + """).strip() + + scan = self.exec(scan_cmd, timeout_s=25) + files = [ln for ln in scan.stdout.splitlines() if ln.strip()] + + def _first(pred: Callable[[str], bool]) -> str | None: + for f in files: + if pred(f): + return f + return None + + asv_conf = _first(lambda p: os.path.basename(p) == "asv.conf.json") + pyproject = _first(lambda p: os.path.basename(p) == "pyproject.toml") + setup_cfg = _first(lambda p: os.path.basename(p) == "setup.cfg") + setup_py = _first(lambda p: os.path.basename(p) == "setup.py") + asv_json_candidates = [ + p for p in files if p != asv_conf and os.path.basename(p).startswith("asv.") and p.endswith(".json") + ] + requirements = [p for p in files if os.path.basename(p).startswith("requirements") and p.endswith(".txt")] + env_files = [p for p in files if os.path.basename(p) in ("environment.yml", "environment.yaml")] + + # Parse names/versions using python in-container (unchanged except for robustness). 
+ py = textwrap.dedent(f""" + import json, os + root = {repo_root!r} + rel_asv_conf = {asv_conf!r} + rel_pyproject = {pyproject!r} + project_name_from_pyproject = None + project_name_from_asv = None + python_versions_from_asv = [] + # parse asv.conf.json + if rel_asv_conf: + try: + with open(os.path.join(root, rel_asv_conf), 'rb') as f: + j = json.load(f) + project_name_from_asv = j.get('project') + pyv = j.get('pythons') or j.get('matrix', {{}}).get('pythons') or [] + python_versions_from_asv = [str(v) for v in pyv] + except Exception: + pass + # parse pyproject name (PEP 621 / Poetry) + if rel_pyproject: + try: + import tomllib # 3.11+ + except Exception: + tomllib = None + try: + p = os.path.join(root, rel_pyproject) + data = open(p, 'rb').read() + if tomllib: + t = tomllib.loads(data.decode('utf-8', 'replace')) + project_name_from_pyproject = ( + t.get('project', {{}}).get('name') + or t.get('tool', {{}}).get('poetry', {{}}).get('name') + ) + else: + import re + m = re.search(r'^\\s*name\\s*=\\s*["\\\']([^"\\\']+)["\\\']', data.decode('utf-8','replace'), re.M) + if m: + project_name_from_pyproject = m.group(1) + except Exception: + pass + print(json.dumps(dict( + project_name_from_pyproject=project_name_from_pyproject, + project_name_from_asv=project_name_from_asv, + python_versions_from_asv=python_versions_from_asv, + ))) + """).strip() + parsed = self.exec(f'python - << "PY"\n{py}\nPY', timeout_s=25) + try: + meta = json.loads(parsed.stdout or "{}") + except Exception: + meta = {} + + # asv_dir should be the directory containing asv.conf.json; if top-level, use repo_root + if asv_conf: + asv_dir_rel = os.path.dirname(asv_conf) + asv_dir = repo_root if asv_dir_rel in ("", ".") else f"{repo_root}/{asv_dir_rel}" + else: + asv_dir = None + + repo_tail = repo_root.strip("/").split("/")[-1] or "repo" + cands = [] + for src in (meta.get("project_name_from_pyproject"), meta.get("project_name_from_asv"), repo_tail): + if src and src not in cands: + cands.append(src) + 
if src: + for v in (src.replace("-", "_"), src.replace("_", "-")): + if v not in cands: + cands.append(v) + + return { + "repo_root": repo_root, + "asv_dir": asv_dir, + "asv_conf": asv_conf, + "asv_json_candidates": asv_json_candidates[:16], + "pyproject": pyproject, + "setup_cfg": setup_cfg, + "setup_py": setup_py, + "requirements": requirements[:16], + "env_files": env_files[:8], + "project_name_from_pyproject": meta.get("project_name_from_pyproject"), + "project_name_from_asv": meta.get("project_name_from_asv"), + "python_versions_from_asv": meta.get("python_versions_from_asv") or [], + "pkg_candidates": cands[:8], + } + + def try_import(self, cmd_python: str, candidates: list[str]) -> dict: + """ + Run a quick python import test inside the container. + 'cmd_python' can be 'python' or 'micromamba run -n asv_3.11 python', etc. + """ + body = ( + textwrap.dedent(""" + import importlib, sys + names = {names} + for name in names: + try: + m = importlib.import_module(name) + v = getattr(m, "__version__", None) + print("IMPORTED::%s::%s" % (name, v or "unknown")) + sys.exit(0) + except Exception as e: + print("FAILED::%s::%s" % (name, e)) + sys.exit(1) + """) + .format(names=repr(candidates)) + .strip() + ) + res = self.exec(f'{cmd_python} - << "PY"\n{body}\nPY', timeout_s=60) + ok = "IMPORTED::" in res.stdout + succeeded = None + for line in res.stdout.splitlines(): + if line.startswith("IMPORTED::"): + succeeded = line.split("::", 2)[1] + break + return { + "ok": ok, + "tried": candidates, + "succeeded": succeeded, + "stdout": res.stdout[-2000:], + "stderr": res.stderr[-2000:], + "rc": 0 if ok else 1, + } diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index 9c4dd3b..1a6816c 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -11,6 +11,7 @@ import dspy from datasmith.agents.config import configure_agent_backends +from datasmith.agents.tool_executor import 
ContainerToolExecutor from datasmith.docker.context import BuildResult, ContextRegistry, DockerContext from datasmith.docker.validation import Task, validate_one @@ -71,10 +72,62 @@ class BuildScriptSynthesis(dspy.Signature): ) +class BuildScriptAgentStep(dspy.Signature): + """ + An interactive planner for producing docker_build.sh. It can either: + (A) request a TOOL call (probe_repo, list_tree, read_file, try_import) with JSON args, or + (B) output the final script. + If you need a tool, set next_action to one of: 'probe_repo' | 'list_tree' | 'read_file' | 'try_import' | 'none'. + For read_file, provide JSON like {"path": "...", "max_bytes": 65536}. + For list_tree, provide JSON like {"depth": 2}. + For try_import, provide JSON like {"candidates": ["foo", "bar"]}. + Return docker_build_script ONLY when you're satisfied. + """ + + # Inputs (context) + owner_repo = dspy.InputField(desc="The repository this commit belongs to. E.g. 'scikit-learn/scikit-learn'.") + sha = dspy.InputField(desc="The commit SHA that is currently checked out.") + commit_date = dspy.InputField(desc="The commit date in ISO format, e.g. '2023-10-05T12:34:56Z'.") + stderr_logs = dspy.InputField( + desc="The most recent stderr logs from the last build attempt. Upto ~8k tail-end chars." + ) + stdout_logs = dspy.InputField( + desc="The most recent stdout logs from the last build attempt. Upto ~8k tail-end chars." + ) + failure_more = dspy.InputField( + desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." 
+ ) + last_docker_build_script = dspy.InputField(desc="Previous docker_build.sh script.") + expected_template = dspy.InputField(desc="Stable outer template..") + repo_facts_json = dspy.InputField(desc="JSON of inferred repo facts (paths, candidates, versions).") + toolbelt = dspy.InputField(desc="Human-readable summary of available tools.") + messages_log = dspy.InputField(desc="Transcript of prior tool actions & observations.") + + # Outputs + thought = dspy.OutputField(desc="Brief rationale.") + next_action = dspy.OutputField(desc="One of probe_repo|list_tree|read_file|try_import|none|finish.") + action_input = dspy.OutputField(desc="JSON args for the tool (or empty).") + error_summary = dspy.OutputField(desc="A brief summary of the last build failure, and possible causes.") + resolution_steps = dspy.OutputField(desc="Concrete steps to resolve the failure.") + docker_build_script = dspy.OutputField( + desc="Final executable bash script that successfully builds the project from source." 
+ ) + + class BuildScriptProgram(dspy.Module): def __init__(self) -> None: super().__init__() - self.predict = dspy.Predict(BuildScriptSynthesis) + # self.predict = dspy.Predict(BuildScriptSynthesis) + self.step = dspy.Predict(BuildScriptAgentStep) + + def _toolbelt_text(self) -> str: + return ( + "Tools you can use:\n" + "- probe_repo(): recompute repo facts (asv dir, pyproject, setup, pkg candidates, python versions).\n" + "- list_tree(depth=2): show a trimmed top-level tree for orientation.\n" + "- read_file(path, max_bytes=65536): read a file at this commit.\n" + "- try_import(candidates=[...]): (post-build) quick python import check inside the built image.\n" + ) def forward( self, @@ -86,6 +139,9 @@ def forward( failure_more: str, last_docker_build_script: str, expected_template: str, + repo_facts_json: str, + tool_executor: ContainerToolExecutor, + max_steps: int = 4, ) -> str: logger.info( "DSPy: synthesizing build script for %s@%s (stderr_len=%d, stdout_len=%d, has_last=%s, failure=%s)", @@ -96,18 +152,58 @@ def forward( bool(last_docker_build_script), failure_more, ) - out = self.predict( - owner_repo=owner_repo, - sha=sha, - commit_date=commit_date, - stderr_logs=stderr_logs or "", - stdout_logs=stdout_logs or "", - failure_more=failure_more or "N/A", - last_docker_build_script=last_docker_build_script or "", - expected_template=expected_template, - ) + messages_log = "" + toolbelt = self._toolbelt_text() + iter_script: str | None = None + for step_idx in range(max_steps): + out = self.step( + owner_repo=owner_repo, + sha=sha, + commit_date=commit_date, + stderr_logs=stderr_logs or "", + stdout_logs=stdout_logs or "", + failure_more=failure_more or "N/A", + last_docker_build_script=last_docker_build_script or "", + expected_template=expected_template, + repo_facts_json=repo_facts_json or "{}", + toolbelt=toolbelt, + messages_log=messages_log, + ) + + action = (out.next_action or "").strip().lower() # pyright: ignore[reportAttributeAccessIssue] + 
action_input = (out.action_input or "").strip() # pyright: ignore[reportAttributeAccessIssue] + if action in ("none", "finish") and (out.docker_build_script or "").strip(): # pyright: ignore[reportAttributeAccessIssue] + iter_script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] + break + + # Tool dispatch + observation = tool_executor.choose_action( + action=action, + action_input=action_input, + ) + if action == "probe_repo": + repo_facts_json = tool_executor.facts_json() + + messages_log += f"\n\n# Step [{step_idx + 1}/{max_steps}]\n# Action: {action}\n# Input: {action_input}\n# Observation:\n{observation[:4000]}" + + # If model already emitted a script, prefer it + if (out.docker_build_script or "").strip(): # pyright: ignore[reportAttributeAccessIssue] + iter_script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] + break + + # out = self.predict( + # owner_repo=owner_repo, + # sha=sha, + # commit_date=commit_date, + # stderr_logs=stderr_logs or "", + # stdout_logs=stdout_logs or "", + # failure_more=failure_more or "N/A", + # last_docker_build_script=last_docker_build_script or "", + # expected_template=expected_template, + # ) # Safety belt: ensure the required fixed template anchors are present. 
- script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] + # script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] + script = (iter_script or "").strip() logger.debug("DSPy: candidate script preview: %s", _preview(script, 240)) must_haves = ["cd_asv_json_dir()", "micromamba", "for version in $python_versions; do"] ok_template = all(m in script for m in must_haves) @@ -148,6 +244,8 @@ def synthesize_script( stdout_tail: str, building_template: str, failure_more: str, + tool_exec: ContainerToolExecutor, + max_steps: int = 4, ) -> str: logger.info( "synthesize_script: task=%s/%s@%s, last_script=%s", @@ -168,6 +266,9 @@ def synthesize_script( failure_more=failure_more or "N/A", last_docker_build_script=last_script or "", expected_template=building_template, + repo_facts_json=tool_exec.facts_json(), + tool_executor=tool_exec, + max_steps=max_steps, ) script = str(script) logger.info("synthesize_script: script length=%d", len(script)) @@ -183,6 +284,7 @@ def build_once_with_context( *, timeout_s: int, tail_chars: int, + probe: bool = False, pull: bool = False, ) -> BuildResult: logger.info("build_once_with_context: registering context key=%s", image_name) @@ -198,6 +300,7 @@ def build_once_with_context( client=client, image_name=image_name, build_args={"REPO_URL": repo_url, "COMMIT_SHA": sha}, + probe=probe, force=True, timeout_s=timeout_s, tail_chars=tail_chars, @@ -230,10 +333,11 @@ def agent_build_and_validate( assert task.sha is not None, "task.sha must be set" # noqa: S101 other_contexts = context_registry.get_similar(task) logger.info("agent_build_and_validate: found %d similar contexts", len(other_contexts)) - most_similar = other_contexts[0][1] if len(other_contexts) >= 1 else None - if most_similar and most_similar.building_data: - default_building_data = most_similar.building_data + if len(other_contexts) >= 1: + _, most_similar_ctx = other_contexts[0] + default_building_data = 
most_similar_ctx.building_data else: + _, most_similar_ctx = context_registry.get_default() default_building_data = context_registry["asv/default/default"].building_data logger.info( @@ -246,124 +350,183 @@ def agent_build_and_validate( repo_url = f"https://www.github.com/{task.owner}/{task.repo}" logger.debug("agent_build_and_validate: image_name=%s repo_url=%s", image_name, repo_url) - attempts: list[AttemptRecord] = [] - prior_script = "" # empty on attempt #1 - - # Attempt loop - for i in range(1, max_attempts + 1): - logger.info("agent_build_and_validate: attempt %d/%d", i, max_attempts) - - if i == 1: - failure_more = "N/A" - script = synthesize_script( - program, - task, - prior_script, - stderr_tail="", - stdout_tail="", - building_template=default_building_data, - failure_more=failure_more, - ) - else: - last = attempts[-1].build_result - stderr_tail = (last.stderr_tail if last else "") or "" - stdout_tail = (last.stdout_tail if last else "") or "" - if last and last.rc == 124: - failure_more = "build timeout" - else: - failure_more = f"build failed rc={last.rc}" if last else "build failed" - logger.debug( - "agent_build_and_validate: re-synthesis with last tails (stderr_len=%d, stdout_len=%d, failure=%s)", - len(stderr_tail), - len(stdout_tail), - failure_more, - ) - script = synthesize_script( - program, - task, - attempts[-1].building_data, - stderr_tail=stderr_tail, - stdout_tail=stdout_tail, - building_template=default_building_data, - failure_more=failure_more, - ) - - ctx = DockerContext(building_data=script) - with context_registry.get_lock(): - context_registry.register(image_name, ctx) - - # Save attempt pickle - attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" - _save_pickle(ctx, attempt_pickle) - - # Build - logger.info("agent_build_and_validate: building image '%s'", image_name) - build_res = build_once_with_context( + # build probe. 
+ probe_image_name = image_name.replace("asv/", "asvprobe/") + if not client.images.list(name=probe_image_name): + logger.info("agent_build_and_validate: probe image not found, building probe image") + probe_res = build_once_with_context( client=client, - image_name=image_name, - context=ctx, + image_name=probe_image_name, + context=most_similar_ctx, repo_url=repo_url, sha=task.sha, timeout_s=args.build_timeout, tail_chars=args.tail_chars, + probe=True, + pull=True, ) - attempts.append(AttemptRecord(attempt_idx=i, building_data=script, build_result=build_res)) - - if build_res.ok: - # Save final pickle and then run full validation using your pipeline - final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" - _save_pickle(ctx, final_pickle) - logger.info("agent_build_and_validate: build succeeded; starting validation run") - result = validate_one(task, args, client, context_registry, machine_defaults) - logger.info( - "agent_build_and_validate: validation stage=%s ok=%s rc=%s", - result.get("stage"), - result.get("ok"), - result.get("rc"), + if not probe_res.ok: + logger.warning("agent_build_and_validate: probe build failed; something is wrong with Dockerfile") + raise RuntimeError("probe build failed; check Dockerfile.") + + tool_exec = ContainerToolExecutor( + docker_client=client, + image_name=probe_image_name, + container_name=probe_image_name.replace("/", "-"), + workdir="/workspace/repo/", + ) + + try: + attempts: list[AttemptRecord] = [] + prior_script = "" # empty on attempt #1 + + # Attempt loop + for i in range(1, max_attempts + 1): + logger.info("agent_build_and_validate: attempt %d/%d", i, max_attempts) + + if i == 1: + failure_more = "N/A" + script = synthesize_script( + program, + task, + prior_script, + stderr_tail="", + stdout_tail="", + building_template=default_building_data, + failure_more=failure_more, + tool_exec=tool_exec, + max_steps=args.max_steps, + ) + else: + last = attempts[-1].build_result + stderr_tail = 
(last.stderr_tail if last else "") or "" + stdout_tail = (last.stdout_tail if last else "") or "" + if last and last.rc == 124: + failure_more = "build timeout" + else: + failure_more = f"build failed rc={last.rc}" if last else "build failed" + logger.debug( + "agent_build_and_validate: re-synthesis with last tails (stderr_len=%d, stdout_len=%d, failure=%s)", + len(stderr_tail), + len(stdout_tail), + failure_more, + ) + script = synthesize_script( + program, + task, + attempts[-1].building_data, + stderr_tail=stderr_tail, + stdout_tail=stdout_tail, + building_template=default_building_data, + failure_more=failure_more, + tool_exec=tool_exec, + max_steps=args.max_steps, + ) + + ctx = DockerContext(building_data=script) + with context_registry.get_lock(): + context_registry.register(image_name, ctx) + + # Save attempt pickle + attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" + _save_pickle(ctx, attempt_pickle) + + # Build + logger.info("agent_build_and_validate: building image '%s'", image_name) + build_res = build_once_with_context( + client=client, + image_name=image_name, + context=ctx, + repo_url=repo_url, + sha=task.sha, + timeout_s=args.build_timeout, + tail_chars=args.tail_chars, + ) + attempts.append(AttemptRecord(attempt_idx=i, building_data=script, build_result=build_res)) + + if build_res.ok: + # import_works = False + # import_check_res = None + # try: + # import_check_res = tool_exec.import_check(cmd_python="python") + # logger.info( + # "agent_build_and_validate: import_check ok=%s candidates=%s", + # import_check_res.get("ok"), + # import_check_res.get("candidates"), + # ) + # except Exception as e: + # logger.warning("agent_build_and_validate: import_check error: %s", e, exc_info=True) + + # import_works = import_check_res.get("ok") if import_check_res else False + # if not import_works and import_check_res: + # # modify build_res to include import_check result + # build_res.stderr_tail = ( + # 
(build_res.stderr_tail or "") + "\n" + (import_check_res.get("stderr_tail") or "") + # ) + # build_res.stdout_tail = ( + # (build_res.stdout_tail or "") + "\n" + (import_check_res.get("stdout_tail") or "") + # ) + # elif not import_works: + # build_res.stderr_tail = ( + # (build_res.stderr_tail or "") + "\n" + "[import_check] failed with unknown error" + # ) + + # Save final pickle and then run full validation using your pipeline + final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" + _save_pickle(ctx, final_pickle) + logger.info("agent_build_and_validate: build succeeded; starting validation run") + result = validate_one(task, args, client, context_registry, machine_defaults) + logger.info( + "agent_build_and_validate: validation stage=%s ok=%s rc=%s", + result.get("stage"), + result.get("ok"), + result.get("rc"), + ) + + result["attempts"] = [ + { + "attempt": a.attempt_idx, + "ok": (a.build_result.ok if a.build_result else False), + "rc": (a.build_result.rc if a.build_result else None), + "stderr_tail": (a.build_result.stderr_tail if a.build_result else ""), + "stdout_tail": (a.build_result.stdout_tail if a.build_result else ""), + } + for a in attempts + ] + result["context_pickle"] = str(final_pickle) + return result + + # otherwise iterate with new logs + logger.warning( + "agent_build_and_validate: attempt %d failed (rc=%s). 
Iterating if attempts remain.", + i, + (build_res.rc if build_res else "unknown"), ) - result["attempts"] = [ + # All attempts failed + + last = attempts[-1].build_result + logger.error("agent_build_and_validate: all attempts failed for %s", image_name) + return { + "owner": task.owner, + "repo": task.repo, + "sha": task.sha, + "image_name": image_name, + "stage": "build", + "ok": False, + "rc": (last.rc if last else 1), + "duration_s": (last.duration_s if last else None), + "stderr_tail": (last.stderr_tail if last else ""), + "stdout_tail": (last.stdout_tail if last else ""), + "attempts": [ { "attempt": a.attempt_idx, "ok": (a.build_result.ok if a.build_result else False), "rc": (a.build_result.rc if a.build_result else None), - "stderr_tail": (a.build_result.stderr_tail if a.build_result else ""), - "stdout_tail": (a.build_result.stdout_tail if a.build_result else ""), } for a in attempts - ] - result["context_pickle"] = str(final_pickle) - return result - - # otherwise iterate with new logs - logger.warning( - "agent_build_and_validate: attempt %d failed (rc=%s). 
Iterating if attempts remain.", - i, - (build_res.rc if build_res else "unknown"), - ) - - # All attempts failed - - last = attempts[-1].build_result - logger.error("agent_build_and_validate: all attempts failed for %s", image_name) - return { - "owner": task.owner, - "repo": task.repo, - "sha": task.sha, - "image_name": image_name, - "stage": "build", - "ok": False, - "rc": (last.rc if last else 1), - "duration_s": (last.duration_s if last else None), - "stderr_tail": (last.stderr_tail if last else ""), - "stdout_tail": (last.stdout_tail if last else ""), - "attempts": [ - { - "attempt": a.attempt_idx, - "ok": (a.build_result.ok if a.build_result else False), - "rc": (a.build_result.rc if a.build_result else None), - } - for a in attempts - ], - "files": [], - } + ], + "files": [], + } + finally: + tool_exec.shutdown() diff --git a/src/datasmith/agents/tool_executor.py b/src/datasmith/agents/tool_executor.py new file mode 100644 index 0000000..c883760 --- /dev/null +++ b/src/datasmith/agents/tool_executor.py @@ -0,0 +1,118 @@ +from __future__ import annotations + +import json +import logging +from dataclasses import dataclass + +import docker + +from datasmith.agents.container_toolbox import PersistentContainer + +logger = logging.getLogger(__name__) + + +@dataclass +class ContainerToolExecutor: + """ + Wires DSPy 'tools' to a persistent container. 
+ """ + + docker_client: docker.DockerClient + image_name: str + container_name: str | None = None + workdir: str | None = None + env: dict | None = None + + def __post_init__(self) -> None: + self._pc = PersistentContainer( + client=self.docker_client, + image=self.image_name, + name=self.container_name, + workdir=self.workdir, + env=self.env, + ) + self._pc.start() + self._repo_root = self._pc.find_repo_root() + self._facts_json = json.dumps(self._pc.infer_repo_facts(self._repo_root or "/"), indent=2) + + def shutdown(self) -> None: + try: + self._pc.stop() + except Exception: + logger.warning("Error stopping container", exc_info=True) + pass + + # ---- DSPy tool entry points ---- + + def choose_action(self, action: str, action_input: str) -> str: + observation = "" + try: + if action == "probe_repo": + facts_json = self.exec_probe_repo() + observation = f"[probe_repo] OK\n{facts_json[:2000]}" + # repo_facts_json = facts_json # refresh for next step + self._facts_json = facts_json + elif action == "list_tree": + observation = self.exec_list_tree(action_input) + elif action == "read_file": + observation = self.exec_read_file(action_input) + elif action == "try_import": + observation = self.exec_try_import(action_input) + else: + observation = f"[noop] Unknown action '{action}'" + except Exception as e: + observation = f"[tool_error] {type(e).__name__}: {e}" + return observation + + def exec_probe_repo(self) -> str: + self._repo_root = self._pc.find_repo_root() + facts = self._pc.infer_repo_facts(self._repo_root or "/") + self._facts_json = json.dumps(facts, indent=2) + return self._facts_json + + def exec_list_tree(self, action_input: str) -> str: + root = self._repo_root or "/" + items = self._pc.list_tree(root, max_depth=3, max_items=600) + return json.dumps({"repo_root": root, "files": items[:120]}, indent=2) + + def exec_read_file(self, action_input: str) -> str: + try: + args = json.loads(action_input or "{}") + except Exception: + args = {} + path = 
args.get("path") + max_bytes = int(args.get("max_bytes", 64_000)) + if not path: + return "[read_file] missing 'path'" + if not path.startswith("/"): + # interpret relative to repo_root + path = f"{self._repo_root or '/'}" + ("" if path.startswith("/") else "/") + path + body = self._pc.read_file(path, max_bytes=max_bytes) + if not body: + return f"[read_file] empty or not found: {path}" + return f"--- BEGIN {path} ---\n{body}\n--- END {path} ---" + + def exec_try_import(self, action_input: str) -> str: + try: + args = json.loads(action_input or "{}") + except Exception: + args = {} + cands = args.get("candidates") or [] + cmd_py = args.get("python", "python") + res = self._pc.try_import(cmd_py, cands) + return json.dumps(res, indent=2) + + # convenience for callers outside DSPy loop + def facts_json(self) -> str: + self._facts_json = json.dumps(self._pc.infer_repo_facts(self._repo_root or "/"), indent=2) + return self._facts_json + + def import_check(self, cmd_python: str) -> dict: + facts = self._pc.infer_repo_facts(self._repo_root or "/") + pkg_candidates = facts.get("pkg_candidates", []) or [] + v = next(iter(facts.get("python_versions_from_asv", [])), None) + if v: + env = f"asv_{v}" + cmd_python = f"micromamba run -n {env} python" + return self._pc.try_import(cmd_python, pkg_candidates) + return self._pc.try_import(cmd_python, pkg_candidates) diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index 95434a3..63528e2 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -39,6 +39,7 @@ class Task: repo: str sha: str | None = None commit_date: float = 0.0 + kind: str = "asv" class DockerContext: @@ -53,25 +54,34 @@ class DockerContext: default_dockerfile_loc = Path(__file__).parent / "Dockerfile" default_entrypoint_loc = Path(__file__).parent / "entrypoint.sh" default_builder_loc = Path(__file__).parent / "docker_build.sh" + default_probe_loc = Path(__file__).parent / "probe_build.sh" dockerfile_data: 
str entrypoint_data: str building_data: str + probing_data: str def __init__( - self, building_data: str | None = None, dockerfile_data: str | None = None, entrypoint_data: str | None = None - ): + self, + building_data: str | None = None, + dockerfile_data: str | None = None, + entrypoint_data: str | None = None, + probing_data: str | None = None, + ) -> None: if building_data is None: building_data = self.default_builder_loc.read_text() if dockerfile_data is None: dockerfile_data = self.default_dockerfile_loc.read_text() if entrypoint_data is None: entrypoint_data = self.default_entrypoint_loc.read_text() + if probing_data is None: + probing_data = self.default_probe_loc.read_text() self.building_data = building_data self.dockerfile_data = dockerfile_data self.entrypoint_data = entrypoint_data + self.probing_data = probing_data - def build_tarball_stream(self) -> io.BytesIO: + def build_tarball_stream(self, probe: bool = False) -> io.BytesIO: tar_stream = io.BytesIO() with tarfile.open(fileobj=tar_stream, mode="w") as tar: # Add Dockerfile @@ -88,7 +98,7 @@ def build_tarball_stream(self) -> io.BytesIO: tar.addfile(entrypoint_info, io.BytesIO(entrypoint_data)) # Add docker_build.sh - building_data = self.building_data.encode("utf-8") + building_data = self.probing_data.encode("utf-8") if probe else self.building_data.encode("utf-8") builder_info = tarfile.TarInfo(name="docker_build.sh") builder_info.size = len(building_data) builder_info.mode = 0o755 # Make it executable @@ -99,7 +109,12 @@ def build_tarball_stream(self) -> io.BytesIO: return tar_stream def build_container( - self, client: docker.DockerClient, image_name: str, build_args: dict[str, str], force: bool = False + self, + client: docker.DockerClient, + image_name: str, + build_args: dict[str, str], + force: bool = False, + probe: bool = False, ) -> None: """Builds the Docker image if it does not exist or if force is True.""" image_exists = False @@ -122,7 +137,7 @@ def build_container( logger.info("$ 
docker build -t %s src/datasmith/docker/ --build-arg %s", image_name, build_args_str) try: client.images.build( - fileobj=self.build_tarball_stream(), + fileobj=self.build_tarball_stream(probe=probe), custom_context=True, tag=image_name, buildargs=build_args, @@ -140,6 +155,7 @@ def build_container_streaming( # noqa: C901 client: docker.DockerClient, image_name: str, build_args: dict[str, str], + probe: bool = False, *, force: bool = False, delete_img: bool = False, @@ -176,7 +192,7 @@ def build_container_streaming( # noqa: C901 logger.info("Docker image '%s' not found locally. Building.", image_name) # Streamed build via low-level API for better control - tar_stream = self.build_tarball_stream() + tar_stream = self.build_tarball_stream(probe=probe) stdout_buf: deque[str] = deque(maxlen=2000) # chunk-tail buffers stderr_buf: deque[str] = deque(maxlen=2000) @@ -307,32 +323,47 @@ def __init__(self, registry: dict[Task, DockerContext] | None = None, default_co self.registry = registry self._lock = threading.Lock() - if "default" not in self.registry: - if default_context is None: - default_context = DockerContext() - self.registry[Task(owner="default", repo="default", sha=None)] = default_context - logger.debug("Default Docker context initialized.") + if default_context is None: + default_context = DockerContext() + + # ensure a default context for BOTH namespaces + for k in ("asv", "asvprobe"): + t = Task(owner="default", repo="default", sha=None, kind=k) + if t not in self.registry: + self.registry[t] = default_context + logger.debug("Default Docker contexts initialized (asv + asvprobe).") + + def get_default(self, kind: str = "asv") -> tuple[Task, DockerContext]: + task = Task(owner="default", repo="default", sha=None, kind=kind) + return task, self.registry[task] def get_lock(self) -> threading.Lock: return self._lock def parse_key(self, key: str) -> Task: - """Parse a string key into a Task object.""" - if not key.startswith("asv/") and not 
key.startswith("asv/default"): - raise ValueError("Key must start with 'asv/' or 'asv/default'") - - # Special "default" handling: e.g. "asv/default-" - if key.startswith("asv/default"): + """Parse a string key into a Task object (now preserving 'asv' vs 'asvprobe').""" + if not ( + key.startswith("asv/") + or key.startswith("asv/default") + or key.startswith("asvprobe/") + or key.startswith("asvprobe/default") + ): + raise ValueError("Key must start with 'asv/' or 'asv/default' or 'asvprobe/' or 'asvprobe/default'") + + # Handle defaults like "asv/default-" and "asvprobe/default-" + if key.startswith("asv/default") or key.startswith("asvprobe/default"): + kind = "asvprobe" if key.startswith("asvprobe/") else "asv" parts = key.split("-") repo = parts[-1] if len(parts) > 2 else "default" - return Task(owner="default", repo=repo, sha=None) + return Task(owner="default", repo=repo, sha=None, commit_date=0.0, kind=kind) parts = key.split("/") - if parts[0] != "asv" or not (3 <= len(parts) <= 4): - raise ValueError("Key must be in the format 'asv/owner/repo' or 'asv/owner/repo/sha'") - owner, repo = parts[1], parts[2] + if parts[0] not in ("asv", "asvprobe") or not (3 <= len(parts) <= 4): + raise ValueError("Key must be 'asv/owner/repo[/sha]' or 'asvprobe/owner/repo[/sha]'") + + kind, owner, repo = parts[0], parts[1], parts[2] sha = None if len(parts) != 4 else parts[3] - # Compute commit date if we have a sha; otherwise 0.0 + date_unix = 0.0 if sha: try: @@ -344,7 +375,7 @@ def parse_key(self, key: str) -> Task: logger.warning("Failed to fetch commit info for %s/%s@%s: %s", owner, repo, sha, exc) date_unix = 0.0 - return Task(owner=owner, repo=repo, sha=sha, commit_date=date_unix) + return Task(owner=owner, repo=repo, sha=sha, commit_date=date_unix, kind=kind) def register(self, key: str | Task, context: DockerContext) -> None: """Register a new Docker context.""" @@ -365,15 +396,19 @@ def get(self, key: str | Task) -> DockerContext: if isinstance(key, str): key = 
self.parse_key(key) + # exact match first if key.sha is not None and key in self.registry: logger.debug(f"Found exact context for key '{key}'.") return self.registry[key] - elif Task(owner=key.owner, repo=key.repo, sha=None) in self.registry: - candidate = Task(owner=key.owner, repo=key.repo, sha=None) - logger.debug(f"Found fallback context '{candidate}' for key '{key}'.") - return self.registry[candidate] - logger.info(f"No context found for key '{key}'. Using default context.") - return self.registry[Task(owner="default", repo="default", sha=None)] + + # owner/repo base (same namespace!) + base = Task(owner=key.owner, repo=key.repo, sha=None, kind=key.kind) + if base in self.registry: + logger.debug(f"Found fallback context '{base}' for key '{key}'.") + return self.registry[base] + + logger.info(f"No context found for key '{key}'. Using default context for namespace '{key.kind}'.") + return self.registry[Task(owner="default", repo="default", sha=None, kind=key.kind)] def get_similar(self, key: str | Task) -> list[tuple[Task, DockerContext]]: # noqa: C901 """ @@ -391,30 +426,25 @@ def get_similar(self, key: str | Task) -> list[tuple[Task, DockerContext]]: # n results: list[tuple[Task, DockerContext]] = [] seen: set[Task] = set() - # 1) Exact match first (if present) + # 1) Exact match (if present) if key in self.registry: results.append((key, self.registry[key])) seen.add(key) - # 2) Other shas for the same owner/repo + # 2) Other SHAs for same owner/repo *in the same namespace* candidates: list[tuple[Task, DockerContext]] = [] for t, ctx in self.registry.items(): if t in seen: continue - if t.owner == key.owner and t.repo == key.repo and t.sha is not None: + if t.kind == key.kind and t.owner == key.owner and t.repo == key.repo and t.sha is not None: candidates.append((t, ctx)) - # Sort candidates: - # - By commit-date proximity if key has a (sha, commit_date) - # - Otherwise alphabetically by sha has_valid_commit_date = getattr(key, "sha", None) is not None and 
getattr(key, "commit_date", None) is not None - if has_valid_commit_date: def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: t, _ = item cand_cd = getattr(t, "commit_date", None) - # Missing commit_date gets sorted to the end. if cand_cd is None: return (float("inf"), str(t.sha)) try: @@ -431,8 +461,8 @@ def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: results.append((t, ctx)) seen.add(t) - # 3) Base owner/repo (sha=None) at the end, if present and not already added - base = Task(owner=key.owner, repo=key.repo, sha=None) + # 3) Base owner/repo for the same namespace + base = Task(owner=key.owner, repo=key.repo, sha=None, kind=key.kind) if base in self.registry and base not in seen: results.append((base, self.registry[base])) diff --git a/src/datasmith/docker/probe_build.sh b/src/datasmith/docker/probe_build.sh new file mode 100644 index 0000000..df7ecb0 --- /dev/null +++ b/src/datasmith/docker/probe_build.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# probe build.sh is equivalent to docker_build.sh but it +# does not install the package in the created envs. +# Instead, it prepares the envs and copies a modified asv conf +# to /output/$COMMIT_SHA/$PYTHON_VERSION/asv.*.json +# which can then be used to run the benchmarks in a separate step. +cd_asv_json_dir() { + local match + match=$(find . -type f -name "asv.*.json" | head -n 1) + + if [[ -n "$match" ]]; then + local dir + dir=$(dirname "$match") + cd "$dir" || echo "Failed to change directory to $dir" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + fi +} +eval "$(micromamba shell hook --shell=bash)" +micromamba activate base + +ROOT_PATH=${PWD} +cd_asv_json_dir || exit 1 +CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") +if [[ -z "$CONF_NAME" ]]; then + echo "No 'asv.*.json' file found in current directory or subdirectories." 
+ exit 1 +fi +python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") +for version in $python_versions; do + python -c "import asv, os, pathlib +path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') +path.mkdir(parents=True, exist_ok=True) + +config = asv.config.Config.load('$CONF_NAME') +config.results_dir = str(path / 'results') +config.html_dir = str(path / 'html') + +asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) +asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) +" + micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv + micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME + micromamba run -n "asv_${version}" pip install meson-python cython +done diff --git a/uv.lock b/uv.lock index c06bb3c..19b8e4d 100644 --- a/uv.lock +++ b/uv.lock @@ -1,8 +1,9 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9, <4.0" resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", "python_full_version < '3.10'", @@ -259,11 +260,11 @@ wheels = [ [[package]] name = "async-timeout" -version = "5.0.1" +version = "4.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345, upload-time = "2023-08-10T16:35:56.907Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721, upload-time = "2023-08-10T16:35:55.203Z" }, ] [[package]] @@ -535,7 +536,8 @@ name = "click" version = "8.2.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -739,7 +741,8 @@ name = "contourpy" version = "1.3.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", ] dependencies = [ @@ -1201,7 +1204,8 @@ name = "dspy" version = "3.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -1463,7 +1467,8 @@ name = "fsspec" version = "2025.7.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + 
"python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -1771,7 +1776,8 @@ name = "ipython" version = "9.4.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", ] dependencies = [ @@ -1938,7 +1944,8 @@ name = "json-repair" version = "0.49.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -2122,7 +2129,8 @@ name = "kiwisolver" version = "1.4.9" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -2260,7 +2268,8 @@ name = "litellm" version = "1.75.8" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -2322,7 +2331,8 @@ name = "markdown-it-py" version = "4.0.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -2470,7 +2480,8 @@ name = "matplotlib" version = "3.10.5" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -2909,7 +2920,8 @@ name = "numpy" 
version = "2.3.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" } @@ -3115,7 +3127,8 @@ name = "pandas-stubs" version = "2.3.0.250703" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -4050,7 +4063,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.4" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -4058,9 +4071,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, 
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] @@ -4080,7 +4093,8 @@ name = "rich" version = "13.7.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", "python_full_version == '3.10.*'", ] @@ -4431,7 +4445,8 @@ name = "scipy" version = "1.16.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", "python_full_version == '3.11.*'", ] dependencies = [ From 29daac83ee624db1be0b67a74cbd0941a973d3ec Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Sun, 31 Aug 2025 00:21:43 +0000 Subject: [PATCH 19/20] small errors --- src/datasmith/agents/container_toolbox.py | 61 +++++++++++++++++++---- src/datasmith/agents/context_synthesis.py | 15 ++++-- 2 files changed, 60 insertions(+), 16 deletions(-) diff --git a/src/datasmith/agents/container_toolbox.py b/src/datasmith/agents/container_toolbox.py index d219b1f..d9bd644 100644 --- a/src/datasmith/agents/container_toolbox.py +++ b/src/datasmith/agents/container_toolbox.py @@ -9,6 +9,7 @@ from typing import Callable import docker +from docker.errors import APIError, NotFound from docker.models.containers import Container logger = logging.getLogger(__name__) @@ -59,17 +60,42 @@ def start(self) -> None: if self.container is not None: return # Make the entrypoint /bin/bash to ensure we have a shell for exec. 
- self.container = self.client.containers.run( - self.image, - command=["trap : TERM INT; while :; do sleep 2147483647; done"], - name=self.name, - working_dir=self.workdir, - environment=self.env, - stdin_open=False, - tty=False, - detach=True, - entrypoint=["/bin/bash", "-lc"], - ) + # if this command fails due to a docker.errors.APIError, then rerun the command after stopping the docker + # container with the same name (if it exists) + try: + self.container = self.client.containers.run( + self.image, + command=["trap : TERM INT; while :; do sleep 2147483647; done"], + name=self.name, + working_dir=self.workdir, + environment=self.env, + stdin_open=False, + tty=False, + detach=True, + entrypoint=["/bin/bash", "-lc"], + ) + except APIError as e: + if "Conflict" in str(e) and self.name: + logger.warning("Container name conflict, trying to remove existing container %s.", self.name) + try: + old_container = self.client.containers.get(self.name) + old_container.stop(timeout=3) + old_container.remove(force=True) + except NotFound: + pass + self.container = self.client.containers.run( + self.image, + command=["trap : TERM INT; while :; do sleep 2147483647; done"], + name=self.name, + working_dir=self.workdir, + environment=self.env, + stdin_open=False, + tty=False, + detach=True, + entrypoint=["/bin/bash", "-lc"], + ) + else: + raise if self.container is None: logger.warning("Failed to start container from image %s", self.image) return @@ -330,3 +356,16 @@ def try_import(self, cmd_python: str, candidates: list[str]) -> dict: "stderr": res.stderr[-2000:], "rc": 0 if ok else 1, } + + +if __name__ == "__main__": + logging.basicConfig(level=logging.DEBUG) + client = docker.from_env() + img_name = "asvprobe/textualize/rich/1de94713811101702b8fcf283c64d1a5de5a8213" + pc = PersistentContainer( + client, img_name, name=img_name.replace("/", "-").replace(":", "-"), workdir="/workspace/repo" + ) + import IPython + + IPython.embed() + pc.stop() diff --git 
a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index 1a6816c..9508b07 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -186,10 +186,11 @@ def forward( messages_log += f"\n\n# Step [{step_idx + 1}/{max_steps}]\n# Action: {action}\n# Input: {action_input}\n# Observation:\n{observation[:4000]}" - # If model already emitted a script, prefer it - if (out.docker_build_script or "").strip(): # pyright: ignore[reportAttributeAccessIssue] - iter_script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] - break + # Don't prefer build_script until model is completely done with it. + # # If model already emitted a script, prefer it + # if (out.docker_build_script or "").strip(): # pyright: ignore[reportAttributeAccessIssue] + # iter_script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] + # break # out = self.predict( # owner_repo=owner_repo, @@ -338,7 +339,11 @@ def agent_build_and_validate( default_building_data = most_similar_ctx.building_data else: _, most_similar_ctx = context_registry.get_default() - default_building_data = context_registry["asv/default/default"].building_data + default_building_data = context_registry["asvprobe/default/default"].building_data + + import IPython + + IPython.embed() logger.info( "agent_build_and_validate: start for %s/%s@%s (max_attempts=%d)", task.owner, task.repo, task.sha, max_attempts From 72fd7b8c71fa992f5c58d210b63fc3bd6934a0a3 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 1 Sep 2025 02:33:00 +0000 Subject: [PATCH 20/20] log of updates. 
Dockerfile revamp, pipeflush previous layers --- Dockerfile | 63 - README.md | 33 +- scratch/context_registry.json | 1987 +---------------- scratch/context_registry_updated.json | 1950 ---------------- scratch/scripts/collect_and_filter_commits.py | 175 ++ scratch/scripts/collect_commits.py | 12 +- scratch/scripts/collect_perf_commits.py | 51 + .../scripts/initialize_context_registry.py | 1030 ++++++--- scratch/scripts/synthesize_contexts.py | 9 +- src/datasmith/__init__.py | 4 - src/datasmith/agents/config.py | 13 +- src/datasmith/agents/container_toolbox.py | 19 +- src/datasmith/agents/context_synthesis.py | 279 ++- src/datasmith/agents/perf_judge.py | 58 +- src/datasmith/agents/tool_executor.py | 16 + src/datasmith/docker/Dockerfile | 33 +- src/datasmith/docker/context.py | 349 ++- src/datasmith/docker/docker_build.sh | 42 - src/datasmith/docker/docker_build_env.sh | 547 +++++ src/datasmith/docker/docker_build_pkg.sh | 108 + src/datasmith/docker/probe_build.sh | 46 - src/datasmith/docker/validation.py | 25 +- .../execution/collect_commits_offline.py | 225 +- src/datasmith/execution/utils.py | 20 + 24 files changed, 2314 insertions(+), 4780 deletions(-) delete mode 100644 Dockerfile delete mode 100644 scratch/context_registry_updated.json create mode 100644 scratch/scripts/collect_and_filter_commits.py create mode 100644 scratch/scripts/collect_perf_commits.py delete mode 100644 src/datasmith/docker/docker_build.sh create mode 100644 src/datasmith/docker/docker_build_env.sh create mode 100644 src/datasmith/docker/docker_build_pkg.sh delete mode 100644 src/datasmith/docker/probe_build.sh diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index d317119..0000000 --- a/Dockerfile +++ /dev/null @@ -1,63 +0,0 @@ -FROM buildpack-deps:jammy - -ARG REPO_URL -ARG COMMIT_SHA -ARG BUILD_SCRIPT # A build script with custom installation commands provided by the user -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - curl git build-essential jq 
&& \ - rm -rf /var/lib/apt/lists/* - -RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \ - | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba - -ENV MAMBA_ROOT_PREFIX=/opt/conda \ - PATH=/opt/conda/bin:$PATH \ - MAMBA_DOCKERFILE_ACTIVATE=1 \ - OPENBLAS_NUM_THREADS=1 \ - MKL_NUM_THREADS=1 \ - OMP_NUM_THREADS=1 - -RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ - python=3.10 \ - git asv pyperf mamba conda libmambapy jq && \ - micromamba clean --all --yes - -RUN mkdir -p /workspace /output -WORKDIR /workspace - -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -RUN git clone "${REPO_URL}" repo && \ - cd repo && \ - git checkout "${COMMIT_SHA}" && \ - \ - CONF_FILE=$(find . -type f -name "asv.*.json" | head -n 1) && \ - if [[ -z "${CONF_FILE}" ]]; then \ - echo "❌ No asv.*.json found." && exit 1; \ - fi && \ - echo "✅ Using ASV config: ${CONF_FILE}" && \ - \ -PY_VERS=$(echo "import json, pathlib; \ -cfg = pathlib.Path('${CONF_FILE}').read_text(); \ -data = json.loads(cfg); \ -vers = data.get('pythons') or data.get('python') or []; \ -print(' '.join(dict.fromkeys(vers)))" | python -) && \ - if [[ -z "${PY_VERS}" ]]; then \ - echo "❌ No Python versions declared in ${CONF_FILE}" && exit 1; \ - fi && \ - echo "🐍 Creating Conda envs for: ${PY_VERS}" && \ - \ - for v in ${PY_VERS}; do \ - micromamba create -y -n "asv_${v}" -c conda-forge \ - python=${v} git mamba conda "libmambapy<=1.9.9"; \ - done - -WORKDIR /workspace/repo - -RUN echo "${BUILD_SCRIPT}" > /workspace/repo/docker_build.sh && \ - chmod +x /workspace/repo/docker_build.sh && \ - /workspace/repo/docker_build.sh - -ENTRYPOINT ["/entrypoint.sh"] diff --git a/README.md b/README.md index efb3b45..3a831e9 100644 --- a/README.md +++ b/README.md @@ -145,13 +145,13 @@ To run the script, you need to have a GitHub token with `repo` and `read:org` pe The scraper can be run using the following command: ```bash $ python scratch/scripts/scrape_repositories.py \ - 
--outfile scratch/artifacts/processed/repos_discovered.csv \ - --min-stars 500 \ - --filtered-outfile scratch/artifacts/processed/repos_valid.csv + --outfile scratch/artifacts/pipeflush/repos_discovered.csv \ + --min-stars 100 \ + --filtered-outfile scratch/artifacts/pipeflush/repos_valid.csv # Writes scratch/artifacts/processed/repos_discovered.csv and scratch/artifacts/processed/repos_valid.csv ``` -The `scratch/artifacts/processed/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads / has atleast 500 stars / pass other sanity checks. We found ~700 filtered repositories for this dataset. +The `scratch/artifacts/pipeflush/repos_valid.csv` file contains a subset of the repositories that aren't forks / reuploads, have at least {min-stars} stars, and pass other sanity checks. We found ~700 filtered repositories for this dataset. ### 4. Collect relevant commits for all repositories @@ -159,18 +159,27 @@ The `scratch/artifacts/processed/repos_valid.csv` file contains a subset of the Given the list of repositories, we find the subset of commits that have already been closed and merged into the main branch (the top 5000 PRs, sorted by popularity). We use the `collect_commits.py` script to do this. The `filter_commits.py` script then filters out those commits that primarily modified the benchmarking files (e.g. `asv.conf.json`) or were not relevant to the benchmarks (e.g. documentation changes). The script also limits the number of repositories to a maximum of 350 to ensure we don't burden the GitHub API with too many requests.
The scripts can be run as follows: ```bash -$ python scratch/scripts/collect_commits.py \ - --dashboards scratch/artifacts/raw/repos_valid.csv \ - --outfile scratch/artifacts/raw/commits_all.jsonl \ - --max-pages 50 -$ python scratch/scripts/filter_commits.py \ - --filtered-benchmarks-pth scratch/artifacts/raw/repos_valid.csv \ - --merged-commits-pth scratch/artifacts/raw/commits_all.jsonl \ - --output-pth scratch/artifacts/raw/commits_filtered.jsonl \ +# $ python scratch/scripts/collect_commits.py \ +# --dashboards scratch/artifacts/raw/repos_valid.csv \ +# --outfile scratch/artifacts/raw/commits_all.jsonl \ +# --max-pages 50 + +# Needs to be a parquet file because the filtered commits are often very large. +$ python scratch/scripts/collect_and_filter_commits.py \ + --filtered-benchmarks-pth scratch/artifacts/pipeflush/repos_valid.csv \ + --output-pth scratch/artifacts/pipeflush/commits_filtered.parquet \ --max-repos 350 \ --threads 8 \ --procs 8 + +$ python scratch/scripts/collect_perf_commits.py \ + --commits scratch/artifacts/pipeflush/commits_filtered.parquet \ + --outfile scratch/artifacts/pipeflush/commits_perfonly.json \ + --max-workers 16 +``` + + # Build contexts for all commits. Each context is a (repo, commit) pair with an associated build_env.sh script to install dependencies. Some reasons a context might fail to build (and get filtered out): # 1. Commit couldn't be checked out # 2. Commit didn't have an asv.conf.json file diff --git a/scratch/context_registry.json b/scratch/context_registry.json index 51e1752..e947916 100644 --- a/scratch/context_registry.json +++ b/scratch/context_registry.json @@ -1,1950 +1,47 @@ { "contexts": { - "Task(owner='apache', repo='arrow', sha='3d6d5817313920abc71c854828d95b63b2562938', commit_date=1726645863.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='apache', repo='arrow', sha='77f099fb5c324afc8ee38cda4976bf20a08e7a4a', commit_date=1668536482.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find 
. -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to wheel if it fails\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='arviz-devs', repo='arviz', sha='904129035bb29d1316833cf6f5f1b5ccf69973e3', commit_date=1577571349.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='arviz-devs', repo='arviz', sha='d58fd616bdbf2f269ca66d293428f14b97064946', commit_date=1569629064.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='asdf-format', repo='asdf', sha='8d342d36794f92db7b14a7a6f1415ff5d65fed9e', commit_date=1701819981.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 
\\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='asdf-format', repo='asdf', sha='8e7fe6cab33649cb55fd5cdcac6cca77d9e9453c', commit_date=1698664980.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='astropy', repo='astropy', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\nROOT_PATH=${PWD}\ngit clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install -e . scipy matplotlib\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='bjodah', repo='chempy', sha='10bdaa5a1d128959ec10128246d977fd137c9671', commit_date=1444135786.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" python setup.py sdist bdist_wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='bjodah', repo='chempy', sha='f61bd0bc1083a4fa90c736d74d591c9eef51f80c', commit_date=1535629364.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='bloomberg', repo='memray', sha='51aa84e51179d80758b3bbd7dce097b2b2e4fd19', commit_date=1701719904.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pkgconfig\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge libunwind\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='bloomberg', repo='memray', sha='926624f40e4f71bb71c8e22106d7979cb06bb29a', commit_date=1673995384.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='calebbell', repo='thermo', sha='436a9ccd0c73c55df4d4a8f7383493f540a6b13f', commit_date=1641864678.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='calebbell', repo='thermo', sha='71259b242aadd45a5e1d2249e29019a2e856ac04', commit_date=1643426520.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='danielgtaylor', repo='python-betterproto', sha='c82816b8be4d6f240cde4e5f28234e5ee3b26920', commit_date=1697423550.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable fails due to BackendUnavailable error\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='danielgtaylor', repo='python-betterproto', sha='ca6b9fe1a2ccf7e8a9b02085a56de905e89eea69', commit_date=1697455035.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local 
match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using wheel if editable install fails due to BackendUnavailable error\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || \\\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='datalad', repo='datalad', sha='83447c2944e4ed89e0a82ff2a3ea9b74221e8990', commit_date=1606433958.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='datalad', repo='datalad', sha='a9f423a8da0d144c88a74893449b6cb88cee3588', commit_date=1637870957.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='dedupeio', repo='dedupe', sha='7d2c79becabe375980613ff3bf66da678cbad658', commit_date=1719492316.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='dedupeio', repo='dedupe', sha='9d527acc20f565f6859e9ee6f4a4903c0629a29f', commit_date=1673926972.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='deepchecks', repo='deepchecks', sha='9a5dd7dc90640d987d6ecf03b8bd9a1ea86199cb', commit_date=1658146693.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Fix the invalid version issue by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='deepchecks', repo='deepchecks', sha='e836e79da9cc0ac9e99ae3d4bfdd2982cd299080', commit_date=1661253434.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the invalid version error by setting a valid version\n sed -i 's/version=\"dev\"/version=\"0.0.1\"/' ${ROOT_PATH}/setup.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='default', repo='default', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone\n", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='devitocodes', repo='devito', sha='ccfb8230f2e5030e4a7b3548334e2d03757841f6', commit_date=1708609467.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='devitocodes', repo='devito', sha='e37d6ffc9edf5b0acc2e0b68c1853052c2959fda', commit_date=1719409850.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='dipy', repo='dipy', sha='26ad85ff190ad0145f73fc87354cb12f2792a475', commit_date=1712766187.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='dipy', repo='dipy', sha='984a2bbff98c7090a222fde52c3b7f6b0b3a189e', commit_date=1751068916.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='django-components', repo='django-components', sha='2472c2ad338a23fba015d4d9816cb62d1325455f', commit_date=1742720064.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='django-components', repo='django-components', sha='e0b718c31495a400d6e8712ed931ce4ab253e673', commit_date=1745142786.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='dottxt-ai', repo='outlines', sha='1e8022e210dc7eb193d8e5808a617b1a9dc15644', commit_date=1752229063.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='dottxt-ai', repo='outlines', sha='e9485cf2126d9c14bd749d55f8aa6729d96808d0', commit_date=1732739305.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='geopandas', repo='geopandas', sha='7d50380229eb84375546c2dc586de659096a6e61', commit_date=1531683944.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='geopandas', repo='geopandas', sha='c07ae3c50b6aa20e745b3693321c469e0d828a1c', commit_date=1611525697.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='h5py', repo='h5py', sha='1487a54fb5149603dcc32604df4db418ea4f5236', commit_date=1663429492.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='h5py', repo='h5py', sha='a8e82bcd63de14daddbc84c250a36c0ee8c850f6', commit_date=1602327474.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pkgconfig # Ensure pkgconfig is installed\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge hdf5 # Ensure HDF5 is installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='holoviz', repo='datashader', sha='00220d8d24a4ada0ac8d30b6875004af5b03fdc4', commit_date=1738081225.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='holoviz', repo='datashader', sha='d9403a963e10e57cbf6c00c64c2998e9931097c0', commit_date=1736788153.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install hatchling\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='inducer', repo='loopy', sha='628b37187bec02ecd863662a96d024fbea5e89bf', commit_date=1623653651.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='inducer', repo='loopy', sha='b5da71bb9abf90848e0f196eedbd564d4fc477d2', commit_date=1623736465.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='ipython', repo='ipyparallel', sha='127b48f8bfeb3576c27e734a5414599fbbd4037e', commit_date=1679989417.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='ipython', repo='ipyparallel', sha='1cda27e603bf6e14866d085822afbf19b04d7574', commit_date=1681399422.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='kedro-org', repo='kedro', sha='507ebe4fbb660cd38e7ba5f9fbf89d35bfce29a4', commit_date=1746617473.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='kedro-org', repo='kedro', sha='b3a29d18f8ba2572a371f92b6f862148b77ffec6', commit_date=1744035416.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='lmfit', repo='lmfit-py', sha='9f9af6f36c0928767ea8b004ea8cb5a16aba6b04', commit_date=1634240070.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='lmfit', repo='lmfit-py', sha='f3dfdd8607aca6aceae29fb3fd57e03fd308a472', commit_date=1547606940.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='makepath', repo='xarray-spatial', sha='4df552cb70ae2f6f07b4325bcbf6a1b2afdb6718', commit_date=1643710398.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install pyct\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN 
/workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr 
${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='makepath', repo='xarray-spatial', sha='59984d859820e6e1cd9f11f1bf7696c04d1924fb', commit_date=1646634548.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install param pyct 
# Ensure 'param' and 'pyct' are installed\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='man-group', repo='arctic', sha='91c2d269d7ad48db23799b3d21cb191880286806', commit_date=1519908330.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='man-group', repo='arctic', sha='d33d24bb8d6d6625351b316ce55b74ef8c957744', commit_date=1521040101.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='man-group', repo='arcticdb', sha='97493e6cf3b46f52204ce5ef436f1e828f6b0bb3', commit_date=1728297449.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='man-group', repo='arcticdb', sha='dd4617e309c5b31cebe79816ea43bf1136b59365', commit_date=1722514119.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel build\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation .\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mars-project', repo='mars', sha='a4645734e87bd01320ecf28191f6954dd034cbf4', commit_date=1654482585.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mars-project', repo='mars', sha='acecc9c6bdb7fbd45003e4a37424c42a4cec8ac2', commit_date=1652428417.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='0030b3864eb77a90a9442904e7d64d1619c6add5', commit_date=1607478583.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build 
&& \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='0880dc18c211a6508240a43ff6fe618c9be7f568', commit_date=1617487191.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='0b6b372fdfcdef15aacbe1c2b82d728f4f1c0401', commit_date=1607478582.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='0f9a6e558a5798880c7b5604346a8a15826d0187', commit_date=1607980018.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package-requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r ${ROOT_PATH}/package-requirements.txt\n fi\n\n # Install optional dependencies that may be needed for tests\n micromamba run -n \"asv_${version}\" pip install pytest-xdist pytest-cov sphinx sphinx-sitemap sphinx-rtd-theme\n\n # Build and install the package\n cd ${ROOT_PATH}\n # First try pip install with --no-deps to avoid dependency conflicts\n if ! 
micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .; then\n # If that fails, try building wheel and installing\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install --no-deps dist/*.whl\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='108ffe0b19080b39975a93f947162f7371ac9144', commit_date=1539114837.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install project in development mode\n cd ${ROOT_PATH}\n if [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n fi\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='13a5df0fcbf13852da5613cefd84708e1fd506c6', commit_date=1618051027.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='18372f1526d8b0df776232504afe508ae8944b4c', commit_date=1696946134.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions below 3.9 as per error message\n if [[ \"$version\" < \"3.9\" ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n \n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\" build\n \n # Install additional dependencies from requirements files\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r package/requirements.txt\n fi\n \n # Build and install MDAnalysis from package directory\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e package/\n # Install dependencies after the package installation\n micromamba run -n \"asv_${version}\" pip install -e 
package/[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='1cd2b3b4f4d70c24c8de234d35ba1a7f900212c0', commit_date=1629565332.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \"cython>=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.71\" \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='1cfe404e5d2c2a807162d4e3d440b6969e14d87b', commit_date=1732305078.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build and test dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.36\" \"numpy>=1.21.0\" pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython tidynamics\n \n # Ensure build system requirements are met\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel\n \n # Look for package in subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Build and install MDAnalysis in development mode\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .[test,analysis]\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .[test,analysis]\n else\n echo \"Neither pyproject.toml nor setup.py found. 
Cannot install package.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='1eca65591fa402584dd29b6d1a02111af30e68eb', commit_date=1691706686.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install additional required packages\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n # Install requirements one by one to better handle failures\n while IFS= read -r requirement || [[ -n \"$requirement\" ]]; do\n # Skip empty lines and comments\n [[ -z \"$requirement\" || \"$requirement\" =~ ^#.*$ ]] && continue\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < package/requirements.txt\n fi\n \n # Build and install MDAnalysis\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f 
\"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='264d6f9357a978444baa1f99411a03453664ab2b', commit_date=1672867721.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose 
--no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='26880f0f0a4bb831fca9668650400858c34f442b', commit_date=1602889606.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='2ee4e9da5aa3a2c1b21fc3d1897bd70e0ab2064d', commit_date=1602770152.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis with optimized flags\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='35d9d2e3ab08e7e6741b57fe02a7215fe3b91a6c', commit_date=1742597504.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and tools\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install networkx matplotlib seaborn netCDF4 mmtf-python gsd biopython parmed griddataformats joblib threadpoolctl scikit-learn hypothesis codecov\n \n # Install MDAnalysis in editable mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='392c8ae5391e20f5e496f7ac03dae08c44deca3b', commit_date=1646727863.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Check for pyproject.toml or setup.py and install accordingly\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. 
Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='39b0e4cc184725cd0e5e710780c8154ed4de9f4f', commit_date=1524419705.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools>=45.0\" wheel \"cython>=0.29\" numpy\n \n # Install MDAnalysis in editable mode with specific dependencies for this older version\n micromamba run -n \"asv_${version}\" pip install --no-deps --verbose --editable \"${ROOT_PATH}\"\n \n # Install runtime dependencies appropriate for the 2018 version\n micromamba run -n \"asv_${version}\" pip install six mmtf-python mock biopython networkx gsd scipy matplotlib\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='404040598f78db05882fa5b2bba1d35fc6a30510', commit_date=1605754667.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='42c541771ab7aee318783d296caa3e10b33f53eb', commit_date=1613225552.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13,<3.0\" \"numpy>=1.16.0\" \"setuptools>=40.8.0\" wheel\n \n # Additional dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd matplotlib netcdf4 networkx\n \n # Build and install MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='4365f3b07c1bf2ebcf16424b26162102954c5b90', commit_date=1591777205.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl 
git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='45e56e8314c278e3eb98ed7a6029b74e7435e8be', commit_date=1598362533.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='474be5bbe32270bb9ddf02dc3cab74d3c1312c5e', commit_date=1728274662.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n \n # Check if we're in the right directory and if setup.py exists\n cd ${ROOT_PATH}\n if [ ! -f \"setup.py\" ] && [ ! 
-f \"pyproject.toml\" ]; then\n # Try to find the package directory\n if [ -d \"package\" ]; then\n cd package\n elif [ -d \"mdanalysis\" ]; then\n cd mdanalysis\n fi\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='481e36a3aa8767c4b895eabfd7ef8b89132ab611', commit_date=1723835551.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx tidynamics biopython\n \n # Check if we're in the right directory and if setup.py exists\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd \"${ROOT_PATH}/package\" || exit 1\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='4fafd51de84d5b89be0559a412acefde0040847c', commit_date=1726273184.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", - 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='52b3d261240efed0546d9f15ee42c7f445e72c13', commit_date=1693261706.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with version constraints for Python 3.8\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython\n \n # Install additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock biopython networkx gsd\n \n # Install package in development mode\n cd \"${ROOT_PATH}\"\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found. 
Attempting direct install.\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='559528f3349bebcaeb82e7f97fd6b76ae8aecce2', commit_date=1501861121.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install numpy and other build dependencies separately to handle version constraints\n if [[ \"$version\" == \"2.7\" ]]; then\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<1.17\" \"scipy<1.3\" \"cython<3.0\" pytest setuptools\n else\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<2.0\" \"scipy<2.0\" \"cython<3.0\" pytest setuptools\n fi\n \n # Install compilers and build tools\n micromamba install -y -n \"asv_${version}\" -c conda-forge compilers wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install 
git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5948963e0e9d92c9ddd0829ba3df3d9d496bbf01', commit_date=1672872621.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install --upgrade \"pip<24.0\" setuptools wheel\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\"\n \n # Install package requirements from package directory\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install with relaxed constraints and 
ignore errors\n sed 's/>=/~=/g' ${ROOT_PATH}/package/requirements.txt | grep -v \"numpy\" | micromamba run -n \"asv_${version}\" pip install -r /dev/stdin || true\n fi\n \n # Install additional dependencies needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"biopython~=1.80\" fasteners griddataformats\n \n # Install the package in development mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n else\n echo \"No pyproject.toml or setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set 
ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='59f4e395178240d5e3f36088d7a4d98ddd0e3607', commit_date=1680135568.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx-rtd-theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx 
tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5bf1979b36cd4d5f55d691e6927aa606fbeb8791', commit_date=1703619619.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install \"numpy>=1.21.0\" \"cython>=0.29.32\" \"mmtf-python>=1.0.0\" gsd biopython scipy pytest\n\n # Look for package directory containing setup.py\n cd \"${ROOT_PATH}\"\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n fi\n\n # 
Set environment variables to help with compilation\n export CFLAGS=\"-std=c99 -O3 -funroll-loops -fsigned-zeros\"\n export NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION\n\n # Build and install MDAnalysis with specific build settings\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5c19974c43125c94f98ab45d2f9965c70e427eec', commit_date=1541518721.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-deps \"cython>=0.16\" numpy>=1.10.0 biopython>=1.71 networkx>=1.0 griddataformats>=0.4.0 six>=1.4.0 fasteners mmtf-python>=1.0.0 tqdm>=4.43.0 packaging>=20.0 pytest>=3.3.0 mock\n \n # Build and install MDAnalysis in development mode with specific numpy version constraint\n micromamba run 
-n \"asv_${version}\" pip install --verbose --no-build-isolation \"numpy>=1.16.5,<2.0\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5cf8c5599e1a27c53e774c436b4e03fe71080f7a', commit_date=1534279531.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional dependencies required by MDAnalysis\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python tqdm mock joblib\n\n # Build and install MDAnalysis with appropriate flags\n if [[ \"$version\" == \"2.7\" ]]; then\n # For Python 2.7, use a more conservative installation approach\n micromamba run -n 
\"asv_${version}\" pip install --no-deps --editable ${ROOT_PATH}\n else\n # For Python 3.x, use build isolation disabled for better compatibility\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='61e236d45c52030d74ba6277c0a59e8a43a13ea9', commit_date=1593710203.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with pinned versions appropriate for 2020\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29,<0.30\" \"numpy>=1.13.0,<1.19\" \"biopython>=1.71,<1.78\" \\\n \"networkx>=1.0,<2.5\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0,<1.6\" \\\n \"joblib<1.0\" \"mock\" \"psutil<5.8\" \"pytest<6.0\" 
\"pytest-cov\" \"pytest-xdist<2.0\" \"hypothesis<6.0\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='62c35d49bd9458f2b5057d28d4904391a4a38513', commit_date=1534780584.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps 
--no-build-isolation --editable .\n\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='6bc52ec2f0744cdf3c63a2e43aff232381ec4dd1', commit_date=1669766518.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n\n # Ensure we're in the package directory\n cd ${ROOT_PATH}/package || cd ${ROOT_PATH}\n\n # Try to build and install MDAnalysis\n if [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither setup.py nor pyproject.toml found in current directory\"\n exit 1\n fi\n\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='6d5ef34292899958ea2a0148388ecc47cf499da1', commit_date=1620729923.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='73cd1e69be88f1b47b1327c1918c0ad326bec302', commit_date=1603501474.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install build dependencies \n micromamba install -y -n \"asv_${version}\" -c conda-forge numpy scipy cython pytest compilers setuptools pip wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='740cae26820eba538f9990ec904adc9f39a65b2e', commit_date=1619881090.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='740e74e8c61ea01a4b2120bd369b11a58cb9c304', commit_date=1728331627.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create base environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \\\n cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme \\\n mmtf-python mock gsd griddataformats tidynamics \\\n setuptools wheel build\n\n # Look for package subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n\n # Try to build and install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found in current directory\"\n exit 1\n fi\n cd ${ROOT_PATH}/benchmarks\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG 
COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='79dead30cc19cd821617a6746663a68709b276e0', commit_date=1754497815.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python ninja cython packaging\n # Build and install MDAnalysis with meson\n cd ${ROOT_PATH}\n # Ensure we're in the package directory with pyproject.toml\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n fi\n micromamba run -n \"asv_${version}\" python -m pip install --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='7c468a46344d17f91d44059332fcc533dad01cde', commit_date=1567026117.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython networkx matplotlib gsd griddataformats tidynamics\n \n # Install package in development mode with explicit build step\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='81b8ef51e5bc1aa2824294ac6c52818c74975658', commit_date=1741727282.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python mock gsd griddataformats scipy matplotlib biopython networkx tidynamics\n\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"No setup files found in expected locations. 
Please check repository structure.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='84ee67b99fc3bf165d2f58057fac3315d8bb33af', commit_date=1727431157.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install packaging\n \n # Try to find and build from package directory\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n fi\n else\n echo \"Package directory not found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get 
update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='8599e47b77a89486a1ffe97a3f146751611d9595', commit_date=1680132537.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH 
\\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='8c3577f5a72bee654d94367e4bef51791ffa5d0b', commit_date=1591177328.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six gsd mmtf-python networkx matplotlib biopython griddataformats GridDataFormats scipy tqdm joblib mock\n\n # Install package in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='93c4a97761469a2fd013c280d04435ae178f2c44', commit_date=1693273052.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions < 3.9 as MDAnalysis requires Python 3.9+\n if [[ $(echo \"$version\" | cut -d. -f2) -lt 9 ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm scipy biopython\n\n # Ensure we're in the root directory\n cd ${ROOT_PATH}\n \n # Look for package subdirectory\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Try to build and install the package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n 
\"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\n \n # Install remaining dependencies\n micromamba run -n \"asv_${version}\" pip install -e .[test,doc]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='95fedb590d1afd268c0a643302cd703b8756f5d3', commit_date=1685194826.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Check Python version requirement (MDAnalysis needs >=3.9)\n if (( $(echo \"$version\" | cut -d. 
-f1,2 | sed 's/\\.//' | bc) < 39 )); then\n echo \"Skipping Python $version as MDAnalysis requires Python >=3.9\"\n continue\n fi\n\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \\\n numpy scipy cython pytest compilers \\\n gsd networkx matplotlib tqdm pandas\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n\n # Install build dependencies and package\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e \".[test,analysis]\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo 
pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', 
repo='mdanalysis', sha='9a2cd43fccd4426f91b195ea9902e5b78a6c2e3b', commit_date=1710090427.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel matplotlib pandas\n micromamba run -n \"asv_${version}\" pip install GridDataFormats mmtf-python networkx fasteners mda-xdrlib 
waterdynamics pathsimanalysis mdahole2\n \n # Install the package in editable mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='9ba1ab964920acfc986d8e264f78c965e062e9d0', commit_date=1511010257.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n # Using older versions since this is a 2017 commit\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<=1.13\" \"scipy<=1.0\" \"cython<=0.27\" setuptools wheel pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"sphinx<1.7\" \"sphinx_rtd_theme<0.3\"\n \n cd ${ROOT_PATH}\n # Build and install MDAnalysis with optimizations disabled to avoid timeouts\n CFLAGS=\"-O0\" CXXFLAGS=\"-O0\" 
micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a1bca526f473325f91c12fb15c887243a2a9244b', commit_date=1646736472.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode with older Cython version\n cd ${ROOT_PATH}\n # First install the core package\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable package/\n # Then install the test suite\n micromamba run 
-n \"asv_${version}\" pip install --no-build-isolation --editable testsuite/\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a3672f216aa162f2549d1712fad0118b2cc98d49', commit_date=1734398599.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install gsd mmtf-python networkx scipy tqdm packaging matplotlib biopython griddataformats\n 
\n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a6034750dc47c8904a297efa184292c73c0690bb', commit_date=1692115614.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy \"packaging<22\" pytest\n \n # Install package in development mode\n if [ -f \"package/setup.py\" ]; then\n cd package\n fi\n \n # Try to build and install\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n \n 
# Return to root directory\n cd ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a64eed98b38307e4699b59eef9f265cbead37ad6', commit_date=1607980019.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with explicit numpy dependency\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable .\n micromamba run -n \"asv_${version}\" pip install numpy scipy\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a6edec02af44fbb4589ef1da25a54a4cc8895ee4', commit_date=1671201733.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd netcdf4 bzip2 mmtf-python\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install pytest-xdist sphinx sphinx_rtd_theme\n\n # Ensure we're in the root directory before building\n cd \"${ROOT_PATH}\"\n\n # Build and install MDAnalysis with optimizations\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n \n # First try pyproject.toml-based install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n # Fallback to setup.py if exists\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n 
else\n echo \"Neither pyproject.toml nor setup.py found. Checking package subdirectories...\"\n # Check for package subdirectories\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n cd \"${ROOT_PATH}\"\n elif [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" python setup.py develop\n cd \"${ROOT_PATH}\"\n else\n echo \"No installation method found. Build failed.\"\n exit 1\n fi\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='aaa4456db50e237cf580c8c986c00d7c5fbe3075', commit_date=1703622753.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n\n # Navigate to package directory if needed\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd ${ROOT_PATH}/package || exit 1\n fi\n \n # Build and install MDAnalysis with specific compiler flags and additional dependencies\n export CFLAGS=\"-DXDR_GETPOS_RETURNS_UINT32=1 -DXDR_SETPOS_RETURNS_INT=1\"\n # Try installing with conda-forge compilers first\n micromamba install -y -n \"asv_${version}\" -c conda-forge gcc gxx\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b3208b39aab61be53f8b610f1fef628f83262205', commit_date=1725909222.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n # Try to find and use setup.py in package subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n # Install MDAnalysis in development mode with verbose output\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b5ba8278b3e09b80109aa06f77832be00f8752f0', commit_date=1510724778.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # For older versions of MDAnalysis, build_ext is needed before install\n micromamba run -n \"asv_${version}\" python setup.py build_ext 
--inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b672b595b57f6862d486391d646cf30c31fd8501', commit_date=1598490143.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy\n \n # Build and install MDAnalysis with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n 
PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b7f36bd148f1eed47f2dc935b89d28c8cae468c4', commit_date=1541446943.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install build dependencies first\n if [ -f \"${ROOT_PATH}/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n fi\n \n # Build and install MDAnalysis with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin 
--strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='bc95e31af1bd1a583161318ab381d005452d48ea', commit_date=1611524871.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='bdb1352f4743aa2101ba2d6b3c9c4fbeb5ae8584', commit_date=1680212962.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest 
\\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='be4b6ee8fa243a0d9e18b936a3d018f2b7418914', commit_date=1650356257.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid Cython errors\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython==0.29.36\" \"numpy<2.0.0\"\n\n # Install required dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"mmtf-python>=1.0.0\" \\\n \"networkx>=2.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.5.0\" \\\n \"biopython>=1.80\" \\\n \"griddataformats>=0.4.0\" \\\n \"packaging\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\"\n\n # Build and install MDAnalysis\n cd \"${ROOT_PATH}\"\n if [ -f \"package/setup.py\" ]; then\n cd package\n # Use --no-build-isolation to ensure our carefully installed dependencies are used\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n elif [ -f \"setup.py\" ]; 
then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c07b5c8897688d778e57e1ef34be86f58c969fe7', commit_date=1607478583.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext 
--inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c3289d8994936ce7dbe7842e8877d597ca96360a', commit_date=1752273263.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c565b9d3a11508604a1217e37199ac17a8c618f2', commit_date=1654106359.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython matplotlib tqdm pandas tidynamics\n\n # Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n # Use build system if pyproject.toml exists\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n # Use setup.py if available\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py or pyproject.toml found in package root directory.\"\n # Try looking in package subdirectory\n if [ 
-d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Could not find build configuration. Cannot build package.\"\n exit 1\n fi\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c56e8df543e1aba21959a7c7b3029eacd57d9130', commit_date=1661799771.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<2.0.0\" scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n\n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3,<4.0.0\" \\\n \"mock>=3.0.5\" \\\n \"packaging>=20.4\" \\\n \"pytest-xdist>=1.31.0\" \\\n \"pytest-cov>=2.10.1\" \\\n \"pytest-timeout>=1.4.2\" \\\n \"hypothesis>=5.19.0\" \\\n \"psutil>=4.3.1\" \\\n \"biopython>=1.80\" \\\n \"duecredit>=0.9.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"tqdm>=4.43.0\" \\\n \"joblib>=0.12\" 
\\\n \"fasteners>=0.15\" \\\n \"networkx>=2.0\" \\\n \"threadpoolctl>=2.0.0\"\n\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Build and install MDAnalysis in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c5cbc2551c1175e8d13887783c7ab2894607ac92', commit_date=1671293813.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies that might be needed for compilation\n micromamba install -y -n \"asv_${version}\" -c conda-forge gsd netcdf4 bzip2 
gcc gxx\n \n # Install MDAnalysis with verbose output and no build isolation\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c620b141f018628356bb9cdd16eefa640b6080ba', commit_date=1671200774.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n \"numpy<2.0\" \\\n \"cython<3.0\" \\\n setuptools \\\n wheel \\\n pip \\\n build\n\n # Try building and installing from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n 
micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c6f1a5a5663913f00cc5f727ad0e662bbf23f18f', commit_date=1617010037.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\" setuptools wheel\n \n # Build and install MDAnalysis in development mode with specific flags\n micromamba 
run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c70504d99e8b6ff7f61778cff1f5956da708ddad', commit_date=1619628547.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps 
--no-build-isolation --editable .\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c815614b5ae8ed86eaa0d68e10451fde7e72242b', commit_date=1671293292.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with compatible versions\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install requirements one by one to handle dependencies better\n while IFS= read -r requirement; do\n micromamba run -n 
\"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < \"${ROOT_PATH}/package/requirements.txt\"\n fi\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='cb05695ca422c216406a0eae4040c782a2a03812', commit_date=1629822068.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel cython numpy scipy\n \n # Install optional dependencies that might be needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps matplotlib networkx gsd biopython\n \n # Install the package in editable mode with test dependencies\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='d2e22ffb0cb46af5266e39b940d7f00c1ca293c1', commit_date=1534167809.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip 
install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='d73b653f19e8446bbb9de51bb41d71f78d148d30', commit_date=1534803427.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis with test dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='da77f2bead8c4a634d2ba5b61cd7d7f841c01c0b', commit_date=1671205345.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist\n # Build and install MDAnalysis using setup.py since pyproject.toml is not found\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='daee516f23ead8e42c2e42b7636f9ec243ab306e', commit_date=1603119467.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl 
git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='dcfa60a2ee0bcee7f54e969666950941905d825a', commit_date=1621773545.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e213f2be8e8741efc7cdddd35dc4bd2d88e0ff85', commit_date=1745000938.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n \n # Install package-specific dependencies\n cd ${ROOT_PATH}/package\n if [ -f \"requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\n fi\n \n # Build and install package\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in package directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C 
/usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e3966303776577e15a043daeceff5a591370398a', commit_date=1534255980.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install additional build dependencies\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python mock joblib\n \n # Install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e8fbd529fc55cb187d38bdef141d74757f22bdc5', commit_date=1594518308.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN 
micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='eab18cb8418ddb1dd72b44f474833de4a2999884', commit_date=1654100638.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n mmtf-python \\\n networkx \\\n scipy \\\n matplotlib \\\n tqdm \\\n \"cython<3.0\" \\\n \"numpy<2.0\" \\\n pip \\\n setuptools \\\n wheel \\\n build\n\n # Try building and installing with specific C compiler flags\n cd ${ROOT_PATH}\n export CFLAGS=\"-fcommon\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v .\n\n # If that fails, try alternative installation method\n if [ $? 
-ne 0 ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='eae5845cf5488ae1db1cdcc2075f68406291721e', commit_date=1517964764.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six mmtf-python mock biopython networkx gsd joblib setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", 
- "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='ee4759293e1a4a5109c6b66e133acb1af7d24b0d', commit_date=1567703043.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='f5e9603f35b1e1587c1a1583793374fbfa0f80c5', commit_date=1629232880.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"cython>=0.29.13\" \\\n \"numpy>=1.16.0\" \\\n \"biopython>=1.74\" \\\n \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.0.0\" \\\n \"joblib\" \\\n \"mock\" \\\n \"packaging\" \\\n \"pytest\" \\\n \"pytest-xdist\" \\\n \"pytest-cov\" \\\n \"pytest-timeout\" \\\n \"psutil\" \\\n \"hypothesis\" \\\n \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\" \\\n \"duecredit\"\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get 
install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='f7a6f47e9c8c4637770c2c0cc0c20da841d11622', commit_date=1516881817.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools pip wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics six\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fb9e0bc786b21c15cefe0027fc83a441e1b19950', commit_date=1685186356.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm pandas biopython griddataformats scipy\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n \n # Build and install MDAnalysis\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fe22dc3794f1f5d466f9128e4c7050fa0d58e62f', commit_date=1619962288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install cython numpy setuptools wheel\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 
bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fed8be34a3434a621bacd438d2f9307139a24511', commit_date=1511384425.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Skip Python 2.7 as it's not available in conda-forge anymore\n if [[ \"$version\" == \"2.7\" ]]; then\n echo \"Skipping Python 2.7 as it's no longer supported\"\n continue\n fi\n\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy>=1.16\" \"scipy>=1.5\" cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies for the 2017 commit\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.16\" \"biopython>=1.71\" \\\n \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"six>=1.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \\\n \"mock>=2.0.0\" \"psutil>=4.0.0\" \"fasteners>=0.12.0\" \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \"packaging>=20.0\" \"pytest>=3.3.0\" \"pytest-xdist>=1.4.0\" \"pytest-cov>=2.5.1\"\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # Use older build approach appropriate for 2017 commit\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n 
micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='ff7ffa10901e2df2be12c3d3dd78e4e0a262e90e', commit_date=1614816697.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with specific version constraints\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.74\" \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.9.3\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \\\n \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" 
\"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode with explicit numpy requirement\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"asv_${version}\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n fi\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='modin-project', repo='modin', sha='be3e716107a185961fc209c343b0feefe0fb9751', commit_date=1684841207.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='modin-project', repo='modin', sha='c5aac3ef99d14305ea9a130e14155fc37495e199', commit_date=1608304159.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='napari', repo='napari', sha='3b6800763f97452ccf8230abf5a65fd6beedd247', commit_date=1606539287.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='napari', repo='napari', sha='dfeefb43af6538dd1e5ad7820128dfc844dc54b1', commit_date=1723973799.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='networkx', repo='networkx', sha='1071e14b81baaa4f0becc1849e85839ae8c671d9', commit_date=1716269137.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - 
"dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='networkx', repo='networkx', sha='81df24ce59b5b4fddfa65cd0a57db96748bba904', commit_date=1745208237.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='newton-physics', repo='newton', sha='5b18850fd8243e4c707b596880c01c1966e5168e', commit_date=1753825967.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='newton-physics', repo='newton', sha='cd07ab2c989df6392253a77e82333ec57a433e94', commit_date=1751556054.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nilearn', repo='nilearn', sha='6c1a76e37cf1c0dd6b800271cb3994f3efd38d07', commit_date=1744125996.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nilearn', repo='nilearn', sha='73fe9520ea705056f89b1cd5982947de13d515a0', commit_date=1754650581.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='numpy', repo='numpy', sha='4092a9e160cc247a4a45724579a0c829733688ca', commit_date=1459109632.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='numpy', repo='numpy', sha='9c3f0bb9955d530d43487f2ab800c765c83a3ea7', commit_date=1716460609.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='5495dc762dae2f09b648588d0f979e03ea3ef88b', commit_date=1741386626.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --verbose --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='626fc946dcfe2150b6aed956c57e89ec907ca44a', commit_date=1746035128.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Build and install WARP with CUDA support\n CUDA_PATH=/usr/local/cuda micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='9958a89058d16e7ac634c46b37d9aad6c14b3f10', commit_date=1740864850.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install torch cuda-python\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" python build_lib.py\n # Now install in editable mode\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='a447d70c372b4dbe1b574ebf587c51c9742272db', commit_date=1748714623.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build steps\n micromamba run -n \"asv_${version}\" pip install warp-lang\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Now try the editable install\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='a81f7e773f2905e06fe52262002c2e34a5daa4d8', commit_date=1743362346.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy cmake ninja pytest\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='d641e89a288746c380ef9b4871f45b0d862fd69e', commit_date=1755703901.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific dependencies and build requirements\n micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n # First run build_lib.py to generate required libraries\n cd ${ROOT_PATH}\n # Add missing climits header to fix build error\n sed -i '1i\\#include ' warp/native/bvh.cpp\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Then install WARP without CUDA support since error suggests basic build issues first\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='dc693d89d5b85ac7e72c7f4e226eb58a5d54131f', commit_date=1751384285.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Ensure the necessary libraries are built before attempting to install the package\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='optuna', repo='optuna', sha='445048a74c9090e60a82a49605044cc42727642a', commit_date=1650874136.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find 
. -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='optuna', repo='optuna', sha='c634449ebbd2160ee44a1845d1efd6c20ee200ae', commit_date=1714538588.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pandas-dev', repo='pandas', sha='2f4c93e8322775a0bb06429a02429b95ba6abb26', commit_date=1698253642.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pandas-dev', repo='pandas', sha='94a8af55b703fbaea19da9902a9790c7b93dc0ad', commit_date=1686591905.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='posthog', repo='posthog', sha='16075ff5c3671587db9e6a6a3ed396058d0f413b', commit_date=1733419912.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the pyproject.toml issue by ensuring the 'version' field is present\n if ! 
grep -q \"version\" \"${ROOT_PATH}/pyproject.toml\"; then\n echo \"version = '0.1.0'\" >> \"${ROOT_PATH}/pyproject.toml\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='posthog', repo='posthog', sha='3578a0c1c2b6f4425dc0fddf31d3d256bbf3fc87', commit_date=1655908403.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf 
/var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pvlib', repo='pvlib-python', sha='3692427bef155a32eac525fe965ed8d407a7846e', commit_date=1660774705.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pvlib', repo='pvlib-python', sha='b8c56c5e725ed12f15342c5336f71d52ec8008ce', commit_date=1749300951.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pybamm-team', repo='pybamm', sha='b1fc5950f0d8e5c8e104e00573fdff5561818014', commit_date=1723152711.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='pybamm-team', repo='pybamm', sha='e1f52ffcf9811bb7d5046af47c48a2291bfd50b8', commit_date=1653925577.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --constraint 
\"<3.10,>=3.7\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='bottleneck', sha='c5356daccdab4afc293f56d4b4ff47c154be5bcd', commit_date=1716493787.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='bottleneck', sha='dc01fad42713181b1f2bb13a965eb0651d1308b6', commit_date=1729241092.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --ignore-requires-python\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git 
asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='xarray', sha='4cbb7cbd86af1ccfe2b3b98f0e36a410f86d77ef', commit_date=1523669869.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='xarray', sha='dd6222f01a476caa96630e26d5b02fad6777a886', commit_date=1747916222.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pydicom', sha='50cd981a068c74b01d854c6cac9bb897fe0b74a9', commit_date=1726970247.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pydicom', sha='87266d96add6a6cccaa3032bbc96b0e3009c6dea', commit_date=1690047796.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c 
conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pynetdicom', sha='1b701e898b489d561884d20ad78920607a6d1df0', commit_date=1563786471.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pynetdicom', sha='bb1f9d164d5c408fc28e02f924b3821b92cb45ad', commit_date=1555925288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pymc-devs', repo='pymc', sha='6360b005fc610d0505f84885743215a3e09f046e', commit_date=1614035911.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pymc-devs', repo='pymc', sha='a06081e1e9649bd56e3528cb96380efdf6bb2dc0', commit_date=1710322397.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pysal', repo='momepy', sha='6467ae26e8bfca9ba91e7795ab7899aaf89c576c', commit_date=1604013921.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pysal', repo='momepy', sha='7619f2f760d9027434369114a49150e3d3a483fb', commit_date=1603224289.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-adaptive', repo='adaptive', sha='50fae4341c53439f57fcea63346ba3581bd187d4', commit_date=1665457361.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-adaptive', repo='adaptive', sha='a9bb7f612717000dd2cf6899d8ebbf479807f6f5', commit_date=1550239213.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='0422c82a80b3ec0dc7fcbc69562f99e35358ee80', commit_date=1680293750.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='283f5e7480a7c39f0e11abe63e3c1ecd8b5d8911', commit_date=1616243491.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file to fix the invalid version error and setup.cfg\n mkdir -p \"${ROOT_PATH}/control\"\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n echo \"[metadata]\nversion = 0.0.0.dev0\" > \"${ROOT_PATH}/setup.cfg\"\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n\n # Try installing in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='2ce4bbd983ce00aa2998bce00c7c161ff7c0f1d5', commit_date=1640530701.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file since setup.py fails due to invalid version\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='4ef15c4e95ec73cf5fc4d571be103e67b00caadf', commit_date=1647713524.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required build dependencies\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib setuptools wheel\n\n # Fix the version in setup.py before installing\n sed -i 's/version='\"'\"'dev'\"'\"'/version='\"'\"'0.0.0.dev0'\"'\"'/' \"${ROOT_PATH}/setup.py\"\n \n # Build and install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='82f3fe343422289f076d6883a2448d169606f821', commit_date=1701474288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='a042895507367a5d001af7d3febfd8f386497554', commit_date=1739343810.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='a111b03e651d7c1828d264c1b143d9ccc9030b3f', commit_date=1640969033.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='abeb0e46a3d56c98b4534f73202a5a7ef5a0af87', commit_date=1751727883.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='c3c659638fb22bde11e40868f80f540060c50b40', commit_date=1616196419.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='f7d18f17bf90bfb99a06648982b22d1e4af6ccd2', commit_date=1686374157.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-hyper', repo='h11', sha='80805f06e5859692a9dcc32484b2745b7f215a8a', commit_date=1597311658.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-hyper', repo='h11', sha='d64468627a4adeb4140e1480a836c85ba903a2c6', commit_date=1522821575.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pytroll', repo='satpy', sha='94fc4f7749bc2a27f76c7a16a7289037d41120f2', commit_date=1644305622.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pytroll', repo='satpy', sha='aa7f0dd616a973eb2de0e5b77a9ec51d08cc601c', commit_date=1659722497.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pywavelets', repo='pywt', sha='21a30d2af5aca2b3c5f827aa407cb549e2c99fb9', commit_date=1551150162.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --use-pep517\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pywavelets', repo='pywt', sha='74b44217a66199fa2e0f8e036955fc00f5cbc21a', commit_date=1708613848.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='qiskit', repo='qiskit', sha='023cbd4ec646fc81e0434b6de434bb477ad94979', commit_date=1755506488.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='qiskit', repo='qiskit', sha='b12e9ec3cff020983e3dde9b16f5ccc4fd0f4963', commit_date=1715792171.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython setuptools-rust\n micromamba run -n \"asv_${version}\" pip install rustup\n micromamba run -n \"asv_${version}\" rustup toolchain install stable\n micromamba run -n \"asv_${version}\" rustup default stable\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='quantumlib', repo='cirq', sha='01ae51eebf3b18a5cbee9fc0c697d4e1511c07f2', commit_date=1640302944.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='quantumlib', repo='cirq', sha='1a75d9faee3b78765bb4badcf73e3d3e72a3ca2a', commit_date=1744652301.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='rockhopper-technologies', repo='enlighten', sha='d239fa5496a6c342b85343d53a4c16d8db9a87a5', commit_date=1698502059.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='scikit-image', repo='scikit-image', sha='0ff35b21293405e9922e44b9dda3818db960b87e', commit_date=1674543103.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable 
${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-image', repo='scikit-image', sha='c7479c1d7430020a9ee9d92f25a1f0c33e36a7c1', commit_date=1597584715.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='044f1b00a62c9083ce3212a3e69046c9afac0de6', commit_date=1662470783.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='04860335c82d557e663b4cfa218663d1c7bf65fd', commit_date=1689974588.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='05ce8141bc71ad21e55be4d1b3f6609f65e91e49', commit_date=1603277025.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='06e566eb86cfd8c6107cf3bc2b477c97b80002a3', commit_date=1705578508.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='08b6157b0e18480569a5cc08efd44dabad9e60ce', commit_date=1701071115.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='092caed407f3b60de7677d4353bfe0db20a2faab', commit_date=1682603301.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0937b4ab48136eb161ead4abd4806d0708b1bb4c', commit_date=1607961058.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0c65bbfe8ce816a181780d2a249c94dd653e115a', commit_date=1642433763.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0c74b8b7d5cdb60dc3a3240cdb36af40b9f40288', commit_date=1615733031.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0de3b0d1eaacee9f7b15cabc05752cba945c7621', commit_date=1644500459.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1045d16ec13b1cab7878e7555538573d1884aad3', commit_date=1614793397.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='119b837a43d939ec02cf2aeba5bd203f8ebab4c7', commit_date=1649335379.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1415a2890b0451d80feef2d81e921a15d2b9d680', commit_date=1685431571.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='15cb8695a27eb8d4dc281ac3c937e12db8b5a6c1', commit_date=1604221237.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle the multiple packages error\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Create setup.cfg to explicitly specify packages\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\npackage_dir =\n = .\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='16625450b58f555dc3955d223f0c3b64a5686984', commit_date=1652277602.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='191f96908d6bbb46cf7293fb0ac1299f1e8b783d', commit_date=1719904631.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1a78993217b52745d63a3495a819efd7f1b0530a', commit_date=1691676945.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1bb0306a1309f9a57d8c652dec731a95cbd0052b', commit_date=1610422145.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to avoid package discovery issues\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1d1aadd0711b87d2a11c80aad15df6f8cf156712', commit_date=1642210241.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1db03ce68be362baa12330ae3f42b9673863fa52', commit_date=1626800410.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='210740408a732940430047fe9437c2193735573f', commit_date=1719586131.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='226da0d7c458816776549c2580abaa4782dc4c48', commit_date=1637400914.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='28831879f2b5a8f623623735480399735c1bb742', commit_date=1755578702.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='2e213c618841f3635885bab034606512c40a7fd4', commit_date=1646246849.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='320b4c61f97fec3facc3c4c2b4cf9351d3425b44', commit_date=1596283836.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='32c5d05cbd7551fd983a250945013239e0e5cb94', commit_date=1631705680.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='408f561b87f9955e92619cbf924d595a2655344f', commit_date=1678175921.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='432778464cbffc8ca675c1df786c31f8c23fc62c', commit_date=1642715056.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='45a817933ef51a24f0c5863c1026b4fe664b26fa', commit_date=1608647213.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to handle the multiple packages issue\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='4685cf624582cbc9a35d646f239347e54db798dc', commit_date=1652472968.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='49d26cb63fefe43c9b310136e4f2c172d8c433cb', commit_date=1599140563.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools==60.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and environment variables\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n\n # Install scikit-learn in development mode with specific build settings\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='4bc61a09eac44a86758c6a02a2b47f912a696d3b', commit_date=1719575535.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='4e44edebf9e811c718c2842b65db2eb41ba01786', commit_date=1723709827.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='52d93e141a5d874bd288f15cc1d8990f09721aad', commit_date=1754304060.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='55a65a2fa5653257225d7e184da3d0c00ff852b1', commit_date=1695213631.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='584d413fec25fb5c38f06c1fe88e652111395330', commit_date=1675930888.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='598045569c8f96fb345059f5316ea8903d374ff4', commit_date=1615476313.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5a332e77a10a44107276843d8532ef79f239c8f3', commit_date=1681854133.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5a850eb044ca07f1f3bcb1b284116d6f2d37df1b', commit_date=1657115862.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5b46d01f8d5015114644b91ce88ee4bc4fa5386d', commit_date=1680769691.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5c4e9a0fd82dd096bbdf78b69c264a741c768a86', commit_date=1690911539.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5ffec3233034e0413f548380d4a22f4e0eecae94', commit_date=1678722797.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='63a1a31a17f9bd9cdf617b2cf04bfaf2f32f0a17', commit_date=1639082235.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='64d54483edfa55ab44d836f9b08ff1bd38f7f6bb', commit_date=1627659978.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='6595229d116b128c5b36f204dc941f69e14abc7f', commit_date=1718288797.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='673f6259f3fb7bd2a057b1889e23b280fe638998', commit_date=1612389138.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='67ca4dda1d61c9ad95ed68b04cb40da2c822e960', commit_date=1678114713.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='687e84a126965b4179b02d86041a9e997eba87c9', commit_date=1751036214.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='691b00f4b7d169d38cc46cf14668a5029b2df8eb', commit_date=1728910531.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='6d7d0f275db08ca97e7ce9765e5e8f0604e490dd', commit_date=1641981733.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='6f91cbebe5c439d5712860315616b70cd2ca9f87', commit_date=1633437528.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='701537ecca85a333449814c82ac2b78db5f534a8', commit_date=1682379515.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='70ca21f106b603b611da73012c9ade7cd8e438b8', commit_date=1713791446.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='767e9ae7e4fec8bea36c0433ab42f500aacfde64', commit_date=1651223539.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='769da3d51feef52b97b8129bf4700cf088a247b2', commit_date=1613120619.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" wheel\n\n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='76c28285d3d3eb6a2834b7d1db01e296187c60b8', commit_date=1677233852.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7798fd829d0eb3637da17cc5cb359bf52efa551f', commit_date=1630429058.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7a2f5ca3a8478333f194a085b0c3635d75fcdf4d', commit_date=1678442780.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7aabe53e730947df0f6f1f85d640e6daea5bfc9f', commit_date=1634742992.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7af0a18996efb10fcbcdb15c7c132d2eb36be736', commit_date=1687508727.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7b715111bff01e836fcd3413851381c6a1057ca4', commit_date=1624465784.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7c835d550c1dcaf44938b1c285db017a773d7dba', commit_date=1662054353.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7f1d4d05064a160e19f786bfbac8996cf0ecac5d', commit_date=1707518612.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='80ebe21ec280892df98a02d8fdd61cbf3988ccd6', commit_date=1638310769.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='830864629e21509980a9c3904c9bb7bf2be8fec5', commit_date=1655213679.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8525ba5d3c3b5423a5599e654ce73b931882a434', commit_date=1754632277.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='863c552c448118249563f0e709ea83a1a9b2fc7f', commit_date=1612010007.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='871892cef9bc70224233fdf2140c896874c07b57', commit_date=1659000389.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='88c2db24bd3efb631372aa971270d6cb690d914d', commit_date=1726476355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='897c0c570511be4b7912a335052ed479ac5ca1f3', commit_date=1705781316.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8a71b840d3d7f6e5db9f9faf3b6c44f8ed6a3850', commit_date=1705345976.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8a7684705f636a8dfcde8e2239d2e0bcd624ac54', commit_date=1647426404.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8ad7c3f02daae525ee83231fbd33fb65e8e05288', commit_date=1633621378.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8eef0e767c4bdd2fdb83f51b162afa32386d5973', commit_date=1692883694.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='9590c07128d3bad5978f08eeb34613d347b96e38', commit_date=1719499549.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='961afc72e0222cb108b77b68c145ea4424f089da', commit_date=1751880029.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='99410b1bdea296a0df48026aaee85472bf3cb7cf', commit_date=1625818419.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='99562100e941f0972a5a65484ff80f407eeb5137', commit_date=1674572593.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='9c9c8582dff9f4563aa130ef89f155bad0051493', commit_date=1668796144.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='9e38cd00d032f777312e639477f1f52f3ea4b3b7', commit_date=1705585714.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a09a62eda27720a0cb949ea24b1e21d358f95176', commit_date=1676040745.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a47d569e670fd4102af37c3165c9b1ddf6fd3005', commit_date=1652372475.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a67ebbebc173007735e62eef7878c08435d28d89', commit_date=1718987804.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a85b14d4799ba7c4e13e0e942e599f8077dc182e', commit_date=1679350355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a8b1905e8f977fcd4d6a348678bb1e82ed9b3310', commit_date=1606807943.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='aa2131f9bdcfa7ff0dacfd6a47c207cbb68a49fa', commit_date=1751370298.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='abbeacc2daee2b213274924a5a4ffe6cbafb0627', commit_date=1651693256.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='aca8f20db461ca0dd70b02b6a1f41b957b2b12ee', commit_date=1665069106.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ad91259f20529306efe445f5a1da4dccc8c81b5a', commit_date=1663256210.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b1202af3b379e698539a2719f2b1e28706ce5388', commit_date=1638654791.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b3aea0053dadcb67adfc39a90c70ffca607a534f', commit_date=1643205359.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b5d55b4fd19ca97d68e4e34e5822865b0a8e90d2', commit_date=1651487470.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b609562c610822ad4b3c11a9e7a22710aba438af', commit_date=1637744681.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b6b6f63ebefe16403d11e8a0a2281b6e2a811933', commit_date=1678791874.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b8229daafee0e50690d4b8447f93cf1069ba6880', commit_date=1701274890.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='bc7cd3189bc817545791071515693445e1e271db', commit_date=1617352203.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='bf0886bae0ccbc8c5d285b6e2affe7e40474f970', commit_date=1619532370.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c0eb3d37244cc4bf35b82e18bff37320e198b038', commit_date=1670930060.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c217527af5744b9d0db8761c1e3667552312e5e7', commit_date=1652946509.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c300a8f2178fcae847f82ad548fe9452f2ba8bbb', commit_date=1658415495.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c47205fb7d45de50de4afa9760d974e754f103e1', commit_date=1707735651.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c63b21ec309f742defd56033eadfc8f7bf5b510b', commit_date=1711607317.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c6ad7361c8fc68188b83070aa0b6b797058c06fa', commit_date=1646214356.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c9138537790cc0fa352968eed927433fe17ee17c', commit_date=1701967415.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c9525d1600ecd526b9b98e275fc1b85782c25dea', commit_date=1634072165.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c9f9b041758c3fa5fdf74b15995a3e3607b0ad5a', commit_date=1737104589.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='caaa1f52a0632294bf951a9283d015f7b5dd5dd5', commit_date=1732650609.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='cbe8648c33b94bd919c35f4d1e2ae1c4432d9749', commit_date=1748364732.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d5901462551283b689284e582152666faf0dc1da', commit_date=1676911719.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d5df806c9715443f5dc7de9023a1b7aa2045eae4', commit_date=1677234005.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d85b1d3302a3ff45179a5826a747e8ee2562f143', commit_date=1674489554.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d8d5637cfe372dd353dfc9f79dbb63c3189a9ecc', commit_date=1644836117.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d92c76986ac6553ce8e0fe2c1bbaea500c105cc7', commit_date=1679480310.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dc1ea2751e8f4e18f61c7e6d767cf42c6e636256', commit_date=1608485758.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dc580a8ef5ee2a8aea80498388690e2213118efd', commit_date=1670501069.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='de67a4420f1713058070802ad593cbcd2ee2d5f3', commit_date=1677582108.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='df692c03c1a6003878c6fc4d2f9f222d304dcee3', commit_date=1649449476.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dfaef0c6c3aef0d00c72573728c90c1d542e2957', commit_date=1657123469.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dfda968f1d0b3b1ecaeb4125d3e903416eaf18ec', commit_date=1678100532.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e113897235feaf309eaaed24001ca96f3608602f', commit_date=1648574496.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for potential warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e1db2a8173ca37e561cdfa4384481501c4d50868', commit_date=1644639631.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e1e8c66e05dd638ae785855bfb637e0180aea99c', commit_date=1642748755.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e411c29625e66f7e440f1acce4069e01201cf122', commit_date=1672782103.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e64714637d8cc9f4724ae21ea500e4bdc57b0a39', commit_date=1629207428.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e7ae63f784c5f85af41cf8f346d194775f01f333', commit_date=1694440694.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eae3f294d3ba8ae636730537faef4cdd612083ff', commit_date=1678119642.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eb2920766d7b2ffb04359a1dc8b6c611960931b7', commit_date=1725568507.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eb85684feb0505694e66365ba9f4d10a409f8f0b', commit_date=1697017427.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ee524f455dbf0285f7b121a08f1e9613a518abcf', commit_date=1617906457.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eecde00c7a706546271ff40d7d492b5f27046d2b', commit_date=1619516333.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ef200eb16813f4e579f3a4e6cd4603e16f72f5a8', commit_date=1680030341.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ef82b778ecaeee11d6bfd005f59e882410d330b6', commit_date=1751882162.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f1d3417b086550be670cbfbb5b3c1760ac99203f', commit_date=1646068982.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f4ed8ef5e4498c9de2ff4b713c1695d6f312ffba', commit_date=1733748660.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f812e2a27619650463cb12d765f1b443b47c0828', commit_date=1628181136.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f86f41d80bff882689fc16bd7da1fef4a805b464', commit_date=1695653805.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f965fcc0634e47b7230e120850cf7bb4efeb96e7', commit_date=1674829022.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f96ce586eecb361d53b192ea3b44098d1bd49a77', commit_date=1637843007.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='fba028b07ed2b4e52dd3719dad0d990837bde28c', commit_date=1733159260.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='fe08016877e8bd715816cf9fbfb1fb697c3446d2', commit_date=1754300286.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='fee76cc5405c01e283a3b079dcb865f3017d5007', commit_date=1705008338.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ff9344f3d8d11d38fa3a2497199113e5bac9537c', commit_date=1666642605.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='57086e91b65b88a95c89449aa501ff68a61dc39a', commit_date=1563459886.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='faa240fd7469176036a91430ae6a0a45e627c94a', commit_date=1531145592.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scipy', repo='scipy', sha='83dbd97a76af8621dd0228a797f5207bed094c23', commit_date=1679643125.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran pybind11\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge openblas\n micromamba run -n \"asv_${version}\" git submodule update --init\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scipy', repo='scipy', sha='b919b4aa67a541b1fef91820a4e94156f7dd36d2', commit_date=1731196689.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install pybind11\n micromamba run -n \"asv_${version}\" pip install openblas\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='cartopy', sha='9a4d894d9adab3b3a8d9cee6299581ba0ef9ec20', commit_date=1662748176.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='cartopy', sha='d9825f18dc6a70b5b4ef6bc5bf48d8025eef1e8e', commit_date=1581379933.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --upgrade setuptools\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p 
$MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='iris', sha='33deead5846b37019902ba067c87e710e55ff6e6', commit_date=1650551816.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='iris', sha='b2ce2a34e2eef7e3d6203c77ada7ed4ce89e3145', commit_date=1573652360.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='anndata', sha='2712af6efcf2d4356f4185a10e92328168710d9f', commit_date=1680623010.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython flit-core setuptools_scm\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='anndata', sha='df213f659f0e9eadfcab4af48ee98de7145252a7', commit_date=1733842403.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='scanpy', sha='7f3f89ac02e924a3a6d55c31730cfaf23b0b4223', commit_date=1744636041.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='scanpy', sha='ad657edfb52e9957b9a93b3a16fc8a87852f3f09', commit_date=1718709475.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='shapely', repo='shapely', sha='3c3a83986ac5bf434e0ca6b7bd16571a1ddac0a4', commit_date=1696785164.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='shapely', repo='shapely', sha='ff2ceac81cca6240c459eba5a5ce07084fe25ad2', commit_date=1662401853.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sourmash-bio', repo='sourmash', sha='9230fce7479c547c96dabe0c1a749a71a4b9e77c', commit_date=1650894889.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='sourmash-bio', repo='sourmash', sha='d2d638b645048cc93377fb9aff8a3be8c937b8b3', commit_date=1613310154.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge rust\n micromamba run -n \"asv_${version}\" pip 
install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='spotify', repo='voyager', sha='49416c5db539a40adba2588bfe19dc8736db01b2', commit_date=1734118555.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. 
Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='spotify', repo='voyager', sha='88cfc468617fde8360ac6db7e71bc578ba49ed16', commit_date=1725990271.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"${ROOT_PATH}\"\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls 
https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sunpy', repo='sunpy', sha='01ea7b5e2760c24e08386f95fd5fd1c0f73da47f', commit_date=1739035442.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sunpy', repo='sunpy', sha='770f95dbfb033ffacc7172a3cff5158b09f7efe4', commit_date=1651836877.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython extension-helpers\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv 
pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='textualize', repo='rich', sha='1de94713811101702b8fcf283c64d1a5de5a8213', commit_date=1657547667.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='textualize', repo='rich', sha='cb92947610614e04116f82cb001ed44dda1699fb', commit_date=1647342081.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='tqdm', repo='tqdm', sha='0f823e79f303b4a93ef1381badb1e65757e5070f', commit_date=1603641812.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='tqdm', repo='tqdm', sha='42761473f9edf276937cc3a28a6fcabc59f5f97d', commit_date=1575632008.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='xorbitsai', repo='xorbits', sha='aee883be1dcd4cbbd43d67794932d5c858fcffe2', commit_date=1676955703.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to non-editable if it fails\n micromamba run -n 
\"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='xorbitsai', repo='xorbits', sha='ebc391fe0fa55599c3197c52408bd43a4bd9476f', commit_date=1695401335.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml if setup.py is not found\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n else\n echo \"Neither 'setup.py' nor 'pyproject.toml' found in ${ROOT_PATH}. 
Cannot install the project.\"\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE_ARGS:?Need to set ASV_MACHINE_ARGS}\"\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\npip install \"cython<3\"\nbash maintainer/install_all.sh develop\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME ${ASV_MACHINE_ARGS}\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" + "Task(owner='astropy', repo='astropy', sha=None, commit_date=0.0, tag='pkg')": { + "building_data": "#!/usr/bin/env bash\n# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks.\nset -euo pipefail\n\n###### SETUP CODE (NOT TO BE MODIFIED) ######\n# Loads micromamba, common helpers, and persisted variables from the env stage.\nsource /etc/profile.d/asv_utils.sh || true\nsource /etc/profile.d/asv_build_vars.sh || true\neval \"$(micromamba shell hook --shell=bash)\"\n\nROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo\nREPO_ROOT=\"$ROOT_PATH\"\nTARGET_VERSIONS=\"${PY_VERSION:-${ASV_PY_VERSIONS:-}}\"\nif [[ -z \"${TARGET_VERSIONS}\" ]]; then\n echo \"Error: No PY_VERSION set and ASV_PY_VERSIONS not found.\" >&2\n exit 1\nfi\n###### 
END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. 
Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\"\n\n export CFLAGS=\"${CFLAGS:-} -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"$ENV_NAME\" pip install -e . scipy matplotlib\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\\033[1;34m[build]\\033[0m %s\\n\" \"$*\"; }\nwarn() { printf \"\\033[1;33m[warn]\\033[0m %s\\n\" \"$*\" >&2; }\ndie() { printf \"\\033[1;31m[fail]\\033[0m %s\\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some generic packages needed for building/testing.\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge pip git conda mamba libmambapy \\\n numpy scipy cython joblib threadpoolctl pytest \\\n compilers meson-python cmake ninja pkg-config tomli\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \\\"$IMP\\\" --repo-root \\\"$REPO_ROOT\\\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"\n", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n /workspace/repo/docker_build_env.sh\n\n\nFROM env AS 
pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"$ENV_NAME\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n fi\n\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\n \n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! 
IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"$ENV_NAME\" pip install meson-python cython\n export CFLAGS=\"${CFLAGS:-} -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"$ENV_NAME\" python \"${ROOT_PATH}/build_lib.py\"\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"$ENV_NAME\" python \"${ROOT_PATH}/make_version.py\"\n fi\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"$ENV_NAME\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"${CFLAGS:-} -Wno-error=incompatible-pointer-types\"\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <&2\n exit 1\nfi\n###### END SETUP CODE ######\n\n# -----------------------------\n# Agent guidance (read-first)\n# -----------------------------\n# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version},\n# with NO build isolation, then run health checks.\n#\n# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to):\n# - Add extra conda/pip dependencies needed to build this project.\n# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars).\n# - Run arbitrary micromamba/pip commands in the target env.\n# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo.\n# - Change files in the repo if needed (e.g., fix a missing #include).\n# - Anything else needed to get a successful editable install.\n#\n# MUST:\n# - Keep this script idempotent.\n# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent.\n# - Do not modify the SETUP CODE or helper functions below.\n#\n# DO NOT:\n# - Change env names or Python versions outside MODEL EDIT AREA.\n# - Use build isolation unless absolutely necessary.\n\n# -----------------------------\n# Helpers (do not modify)\n# -----------------------------\nlog() { printf \"\u001b[1;34m[build]\u001b[0m %s\n\" \"$*\"; }\nwarn() { printf \"\u001b[1;33m[warn]\u001b[0m %s\n\" \"$*\" >&2; }\ndie() { printf \"\u001b[1;31m[fail]\u001b[0m %s\n\" \"$*\" >&2; exit 1; }\n\n# Conservative default parallelism (override if the repo benefits)\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-2}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-2}\"\n\n# -----------------------------\n# Build & test across envs\n# -----------------------------\nfor version in $TARGET_VERSIONS; do\n ENV_NAME=\"asv_${version}\"\n log \"==> Building in env: $ENV_NAME (python=$version)\"\n\n if ! micromamba env list | awk '{print $1}' | grep -qx \"$ENV_NAME\"; then\n die \"Env $ENV_NAME not found. Did docker_build_env.sh run?\"\n fi\n\n # Import name resolution (kept simple for the agent)\n IMP=\"${IMPORT_NAME:-}\"\n if [[ -z \"$IMP\" ]]; then\n if ! IMP=\"$(asv_detect_import_name --repo-root \"$REPO_ROOT\" 2>/dev/null)\"; then\n die \"Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh\"\n fi\n fi\n log \"Using import name: $IMP\"\n\n # -----------------------------\n # MODEL EDIT AREA: repo-specific tweaks (optional)\n # -----------------------------\n # Examples (uncomment if needed for this repo):\n #\n # log \"Updating submodules\"\n # git -C \"$REPO_ROOT\" submodule update --init --recursive\n #\n # log \"Installing extra system libs via conda-forge\"\n # micromamba install -y -n \"$ENV_NAME\" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp'\n #\n # log \"Pre-generating Cython sources\"\n # micromamba run -n \"$ENV_NAME\" python -m cython --version\n #\n # export CFLAGS=\"${CFLAGS:-}\"\n # export CXXFLAGS=\"${CXXFLAGS:-}\"\n # export LDFLAGS=\"${LDFLAGS:-}\"\n # -----------------------------\n\n # Install some basic micromamba packages.\n\n micromamba install -y -n \"$ENV_NAME\" -c conda-forge git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"$ENV_NAME\" pip install meson-python cython\n\n # Editable install (no build isolation preferrably). 
Toolchain lives in the env already.\n log \"Editable install with --no-build-isolation\"\n PIP_NO_BUILD_ISOLATION=1 micromamba run -n \"$ENV_NAME\" python -m pip install --no-build-isolation -v -e \"$REPO_ROOT\"\n\n # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)\n log \"Running smoke checks\"\n micromamba run -n \"$ENV_NAME\" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke}\n\n # Machine-readable markers (useful in logs)\n echo \"::import_name=${IMP}::env=${ENV_NAME}\"\ndone\n\nlog \"All builds complete \u2705\"", + "dockerfile_data": "# syntax=docker/dockerfile:1.7\n\nFROM buildpack-deps:jammy AS base\n\nARG REPO_URL\nARG COMMIT_SHA\n\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nFROM base AS env\nARG REPO_URL\nARG COMMIT_SHA\n\n# Entrypoint is inherited by pkg\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\nENTRYPOINT [\"/entrypoint.sh\"]\n\nRUN git clone \"$REPO_URL\" /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout \"$COMMIT_SHA\"\nLABEL vcs.url=\"$REPO_URL\" vcs.ref=\"$COMMIT_SHA\"\n\n# ---- ENV STAGE: create envs, persist vars, install smoke tool ----\nCOPY docker_build_env.sh /workspace/repo/docker_build_env.sh\nRUN chmod +x /workspace/repo/docker_build_env.sh && \\\n 
/workspace/repo/docker_build_env.sh\n\n\nFROM env AS pkg\n\n# ---- PKG STAGE: build+test the package for each ASV Python ----\nCOPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh\nRUN chmod +x /workspace/repo/docker_build_pkg.sh && \\\n /workspace/repo/docker_build_pkg.sh\n# If you want to restrict to one version at build time, replace with:\n# RUN PY_VERSION=3.10 /workspace/repo/docker_build_pkg.sh\n", + "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n", + "env_building_data": "#!/usr/bin/env bash\nset -euo pipefail\n\n# -------- Helpers installed for all shells --------\ninstall_profile_helpers() {\n cat >/etc/profile.d/asv_utils.sh <<'EOF'\n# asv_utils.sh \u2014 login/interactive shell helpers for ASV builds\nexport MAMBA_ROOT_PREFIX=\"${MAMBA_ROOT_PREFIX:-/opt/conda}\"\n\n# Initialize micromamba for bash shells (no-op if not present)\nif command -v micromamba >/dev/null 2>&1; then\n eval \"$(micromamba shell hook --shell=bash)\"\nfi\n\n# Find and cd into the first directory that contains an asv.*.json\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n if [[ -n \"$match\" ]]; then\n cd \"$(dirname \"$match\")\" || echo \"Failed to change directory\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n return 1\n fi\n}\n\n# Return just the conf filename (e.g., asv.conf.json)\nasv_conf_name() {\n local f\n f=$(find . -type f -name \"asv.*.json\" | head -n 1)\n [[ -n \"$f\" ]] && basename \"$f\" || return 1\n}\n\n# Build performance knobs (overridable)\nexport MAKEFLAGS=\"${MAKEFLAGS:--j$(nproc)}\"\nexport CMAKE_BUILD_PARALLEL_LEVEL=\"${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}\"\nexport NPY_NUM_BUILD_JOBS=\"${NPY_NUM_BUILD_JOBS:-$(nproc)}\"\n\n# Shared pip cache to speed repeated editable builds\nexport PIP_CACHE_DIR=\"${PIP_CACHE_DIR:-/opt/pipcache}\"\nmkdir -p \"$PIP_CACHE_DIR\"\nEOF\n}\n\n# -------- Persisted build variables --------\nwrite_build_vars() {\n local py_versions=\"$1\"\n local import_name=\"$2\"\n\n mkdir -p /etc/asv_env\n echo \"$py_versions\" > /etc/asv_env/py_versions\n echo \"$import_name\" > /etc/asv_env/import_name\n\n # Exported for every future shell (pkg script, interactive, etc.)\n cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY'\n#!/usr/bin/env python\nimport argparse, importlib, pathlib, sys\n\ndef import_and_version(name: str):\n m = importlib.import_module(name)\n ver = getattr(m, \"__version__\", \"unknown\")\n print(f\"{name} imported ok; __version__={ver}\")\n\ndef probe_compiled(name: str, max_ext: int = 10):\n m = importlib.import_module(name)\n if not hasattr(m, \"__path__\"):\n print(\"No package __path__ (likely a single-module dist); skipping compiled probe.\")\n return\n pkg_path = pathlib.Path(list(m.__path__)[0])\n so_like = list(pkg_path.rglob(\"*.so\")) + list(pkg_path.rglob(\"*.pyd\"))\n failed = []\n for ext in so_like[:max_ext]:\n rel = ext.relative_to(pkg_path).with_suffix(\"\")\n dotted = \".\".join([name] + list(rel.parts))\n try:\n 
importlib.import_module(dotted)\n except Exception as e:\n failed.append((dotted, str(e)))\n if failed:\n print(\"Some compiled submodules failed to import:\")\n for d, err in failed:\n print(\" -\", d, \"->\", err)\n sys.exit(1)\n else:\n print(\"Compiled submodules (if any) import ok\")\n\ndef main():\n p = argparse.ArgumentParser()\n p.add_argument(\"--import-name\", required=True)\n p.add_argument(\"--repo-root\", default=\".\")\n p.add_argument(\"--pytest-smoke\", action=\"store_true\",\n help=\"Run a quick pytest smoke: -k 'not slow' --maxfail=1\")\n p.add_argument(\"--max-ext\", type=int, default=10)\n args = p.parse_args()\n\n import_and_version(args.import_name)\n probe_compiled(args.import_name, max_ext=args.max_ext)\n\n if args.pytest_smoke:\n import subprocess, os\n if any((pathlib.Path(args.repo_root)/p).exists() for p in (\"tests\", \"pytest.ini\", \"pyproject.toml\")):\n print(\"Running pytest smoke...\")\n rc = subprocess.call([sys.executable, \"-m\", \"pytest\", \"-q\", \"-k\", \"not slow\", \"--maxfail=1\"], cwd=args.repo_root)\n if rc != 0:\n sys.exit(rc)\n else:\n print(\"No tests detected; skipping pytest smoke.\")\n print(\"Smokecheck OK \u2705\")\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/asv_smokecheck.py\n}\ninstall_smokecheck\n\n# -------- Install an import-name detector CLI --------\ninstall_detect_import_name() {\n cat >/usr/local/bin/detect_import_name <<'PY'\n#!/usr/bin/env python\nimport argparse, pathlib, re, sys, subprocess, configparser, json\n\n# --- optional TOML loader (py3.11+: tomllib; else tomli if available) ---\ntry:\n import tomllib as toml\nexcept Exception:\n try:\n import tomli as toml\n except Exception:\n toml = None\n\nEXCEPTIONS = {\n # common dist\u2192import mismatches\n \"scikit-learn\": \"sklearn\",\n \"opencv-python\": \"cv2\",\n \"pyyaml\": \"yaml\",\n \"beautifulsoup4\": \"bs4\",\n \"pillow\": \"PIL\",\n \"mysqlclient\": \"MySQLdb\",\n \"psycopg2-binary\": \"psycopg2\",\n 
\"opencv-contrib-python\": \"cv2\",\n \"protobuf\": \"google\", # top-level package\n \"apache-beam\": \"apache_beam\",\n}\n\n# All the package names we typically query.\nEXCEPTIONS.update({\n # --- core scientific stack ---\n \"scikit-learn\": \"sklearn\",\n \"numpy\": \"numpy\",\n \"pandas\": \"pandas\",\n \"scipy\": \"scipy\",\n \"scikit-image\": \"skimage\",\n \"pywt\": \"pywt\",\n \"xarray\": \"xarray\",\n \"bottleneck\": \"bottleneck\",\n \"h5py\": \"h5py\",\n \"networkx\": \"networkx\",\n \"shapely\": \"shapely\",\n\n # --- ML / stats / optimization / viz ---\n \"optuna\": \"optuna\",\n \"arviz\": \"arviz\",\n \"pymc\": \"pymc\",\n \"kedro\": \"kedro\",\n \"modin\": \"modin\",\n \"napari\": \"napari\",\n \"deepchecks\": \"deepchecks\",\n \"voyager\": \"voyager\", # spotify/voyager\n \"warp\": \"warp\", # NVIDIA/warp\n \"newton\": \"newton\", # newton-physics/newton\n\n # --- domain / ecosystem libs ---\n \"geopandas\": \"geopandas\",\n \"cartopy\": \"cartopy\",\n \"iris\": \"iris\",\n \"anndata\": \"anndata\",\n \"scanpy\": \"scanpy\",\n \"sunpy\": \"sunpy\",\n \"pvlib-python\": \"pvlib\",\n \"PyBaMM\": \"pybamm\",\n \"momepy\": \"momepy\",\n \"satpy\": \"satpy\",\n \"pydicom\": \"pydicom\",\n \"pynetdicom\": \"pynetdicom\",\n\n # --- file formats / IO / infra ---\n \"asdf\": \"asdf\",\n \"arrow\": \"pyarrow\", # apache/arrow\n \"ArcticDB\": \"arcticdb\",\n \"arctic\": \"arctic\",\n\n # --- web / frameworks / utils ---\n \"django-components\": \"django_components\",\n \"h11\": \"h11\",\n \"tqdm\": \"tqdm\",\n \"rich\": \"rich\",\n \"posthog\": \"posthog\",\n \"datalad\": \"datalad\",\n \"ipyparallel\": \"ipyparallel\",\n\n # --- numerical / symbolic / control ---\n \"autograd\": \"autograd\",\n \"python-control\": \"control\",\n \"loopy\": \"loopy\",\n \"thermo\": \"thermo\",\n \"chempy\": \"chempy\",\n \"adaptive\": \"adaptive\",\n\n # --- scientific image / signal ---\n \"metric-learn\": \"metric_learn\",\n\n # --- quantum / physics ---\n \"Cirq\": 
\"cirq\",\n \"memray\": \"memray\",\n \"devito\": \"devito\",\n\n # --- bio / chem / data ---\n \"sourmash\": \"sourmash\",\n \"dipy\": \"dipy\",\n\n # --- protocol buffers / codegen / outlines ---\n \"python-betterproto\": \"betterproto\",\n \"outlines\": \"outlines\",\n\n # --- DS viz / raster ---\n \"datashader\": \"datashader\",\n \"xarray-spatial\": \"xarray_spatial\",\n\n # --- misc ---\n \"enlighten\": \"enlighten\",\n \"xorbits\": \"xorbits\",\n \"geopandas\": \"geopandas\",\n \"lmfit-py\": \"lmfit\",\n \"mdanalysis\": \"MDAnalysis\",\n \"nilearn\": \"nilearn\",\n})\n\n\nEXCLUDE_DIRS = {\n \".git\", \".hg\", \".svn\", \".tox\", \".nox\", \".venv\", \"venv\",\n \"build\", \"dist\", \"__pycache__\", \".mypy_cache\", \".pytest_cache\",\n \"docs\", \"doc\", \"site\", \"examples\", \"benchmarks\", \"tests\", \"testing\",\n}\n\ndef _norm(s: str) -> str:\n return re.sub(r\"[-_.]+\", \"\", s).lower()\n\ndef read_pyproject(root: pathlib.Path):\n cfg = {}\n p = root / \"pyproject.toml\"\n if toml and p.exists():\n try:\n cfg = toml.loads(p.read_text(encoding=\"utf-8\"))\n except Exception:\n pass\n return cfg\n\ndef read_setup_cfg(root: pathlib.Path):\n p = root / \"setup.cfg\"\n cp = configparser.ConfigParser()\n if p.exists():\n try:\n cp.read(p, encoding=\"utf-8\")\n except Exception:\n pass\n return cp\n\ndef dist_name_from_config(pyproject, setup_cfg):\n # PEP 621 name\n name = (pyproject.get(\"project\", {}) or {}).get(\"name\")\n if not name:\n # setup.cfg [metadata] name\n if setup_cfg.has_section(\"metadata\"):\n name = setup_cfg.get(\"metadata\", \"name\", fallback=None)\n # setup.py --name as last resort\n return name\n\ndef package_roots_from_config(root, pyproject, setup_cfg):\n roots = set([root])\n # setuptools package-dir mapping\n # pyproject: [tool.setuptools.package-dir] \"\" = \"src\"\n pkgdir = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {}).get(\"package-dir\", {})\n if isinstance(pkgdir, dict):\n if \"\" in pkgdir:\n 
roots.add((root / pkgdir[\"\"]).resolve())\n for _, d in pkgdir.items():\n try:\n roots.add((root / d).resolve())\n except Exception:\n pass\n # setup.cfg [options] package_dir\n if setup_cfg.has_section(\"options\"):\n raw = setup_cfg.get(\"options\", \"package_dir\", fallback=None)\n if raw:\n # can be \"=\\nsrc\" or mapping lines\n lines = [l.strip() for l in raw.splitlines() if l.strip()]\n # accept simple \"=src\" or \"\" = \"src\"\n for ln in lines:\n m = re.match(r'^(\"?\\'?)*\\s*=?\\s*(\"?\\'?)*\\s*(?P[^#;]+)$', ln)\n if m:\n roots.add((root / m.group(\"path\").strip()).resolve())\n # setup.cfg [options.packages.find] where\n if setup_cfg.has_section(\"options.packages.find\"):\n where = setup_cfg.get(\"options.packages.find\", \"where\", fallback=None)\n if where:\n for w in re.split(r\"[,\\s]+\", where):\n if w:\n roots.add((root / w).resolve())\n return [r for r in roots if r.exists()]\n\ndef explicit_modules_from_config(pyproject, setup_cfg):\n mods = set()\n # pyproject (tool.setuptools) py-modules / packages\n st = ((pyproject.get(\"tool\", {}) or {}).get(\"setuptools\", {}) or {})\n for key in (\"py-modules\", \"packages\"):\n val = st.get(key)\n if isinstance(val, list):\n mods.update(val)\n # setup.cfg [options] py_modules / packages\n if setup_cfg.has_section(\"options\"):\n for key in (\"py_modules\", \"packages\"):\n raw = setup_cfg.get(\"options\", key, fallback=None)\n if raw:\n for tok in re.split(r\"[\\s,]+\", raw.strip()):\n if tok and tok != \"find:\":\n mods.add(tok)\n return sorted(mods)\n\ndef read_top_level_from_egg_info(root):\n # editable installs often leave ./.egg-info/top_level.txt\n for ei in root.rglob(\"*.egg-info\"):\n tl = ei / \"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n # also consider dist-info during local builds\n for di in root.rglob(\"*.dist-info\"):\n tl = di / 
\"top_level.txt\"\n if tl.exists():\n try:\n names = [l.strip() for l in tl.read_text(encoding=\"utf-8\").splitlines() if l.strip()]\n if names:\n return names\n except Exception:\n pass\n return None\n\ndef walk_candidates(roots):\n \"\"\"Return set of plausible top-level import names under candidate roots.\"\"\"\n cands = set()\n for r in roots:\n for path in r.rglob(\"__init__.py\"):\n try:\n pkg_dir = path.parent\n # skip excluded dirs anywhere in the path\n if any(part in EXCLUDE_DIRS for part in pkg_dir.parts):\n continue\n # Construct package name relative to the nearest search root\n try:\n rel = pkg_dir.relative_to(r)\n except Exception:\n continue\n if not rel.parts:\n continue\n top = rel.parts[0]\n if top.startswith(\"_\"):\n # usually private tooling\n continue\n cands.add(top)\n except Exception:\n pass\n # standalone modules at top-level of roots (py_modules case)\n for mod in r.glob(\"*.py\"):\n if mod.stem not in (\"setup\",):\n cands.add(mod.stem)\n return sorted(cands)\n\ndef score_candidates(cands, dist_name):\n \"\"\"Assign a score preferring names that match the dist name.\"\"\"\n scores = {}\n n_dist = _norm(dist_name) if dist_name else None\n prefer = None\n if dist_name and dist_name.lower() in EXCEPTIONS:\n prefer = EXCEPTIONS[dist_name.lower()]\n # also try normalized exception keys (e.g. 
capitalization)\n for k, v in EXCEPTIONS.items():\n if _norm(k) == _norm(dist_name or \"\"):\n prefer = v\n\n for c in cands:\n s = 0\n if prefer and _norm(c) == _norm(prefer):\n s += 100\n if n_dist and _norm(c) == n_dist:\n s += 80\n if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))):\n s += 20\n # shorter, simpler names get a slight bump\n s += max(0, 10 - len(c))\n scores[c] = s\n return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores\n\ndef detect(root: str, return_all=False):\n root = pathlib.Path(root).resolve()\n\n pyproject = read_pyproject(root)\n setup_cfg = read_setup_cfg(root)\n dist_name = dist_name_from_config(pyproject, setup_cfg)\n\n # 1) top_level.txt (best signal if present)\n top = read_top_level_from_egg_info(root)\n if top:\n if return_all:\n return top\n # If multiple, score them\n ordered, _ = score_candidates(top, dist_name or \"\")\n return [ordered[0]]\n\n # 2) explicit declarations (py_modules / packages lists)\n explicit = explicit_modules_from_config(pyproject, setup_cfg)\n\n # 3) find correct search roots (src layout, package_dir, etc.)\n roots = package_roots_from_config(root, pyproject, setup_cfg)\n\n # 4) walk code to infer candidates\n walked = walk_candidates(roots)\n\n # merge explicit + walked\n cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup\n\n # 5) fallback from dist name heuristics/exceptions if still empty\n if not cands and dist_name:\n # exception or simple normalization\n guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r\"[-\\.]+\", \"_\", dist_name)\n cands = [guess]\n\n if not cands:\n return []\n\n if return_all:\n # return ordered list\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return ordered\n else:\n ordered, _ = score_candidates(cands, dist_name or \"\")\n return [ordered[0]]\n\ndef main():\n ap = argparse.ArgumentParser(description=\"Detect the top-level Python import name for a repo.\")\n ap.add_argument(\"--repo-root\", 
default=\".\", help=\"Path to repository root\")\n ap.add_argument(\"--all\", action=\"store_true\", help=\"Print all plausible names (JSON list)\")\n args = ap.parse_args()\n\n names = detect(args.repo_root, return_all=args.all)\n if not names:\n sys.exit(1)\n if args.all:\n print(json.dumps(names))\n else:\n print(names[0])\n\nif __name__ == \"__main__\":\n main()\nPY\n chmod +x /usr/local/bin/detect_import_name\n}\n\ninstall_detect_import_name\n\n# -------- Script body --------\n\ninstall_profile_helpers\n# shellcheck disable=SC1091\nsource /etc/profile.d/asv_utils.sh\n\n# Ensure base micromamba is active for introspecting ASV config\nmicromamba activate base\n\ninstall_detect_import_name\ninstall_smokecheck\n\nIMPORT_NAME=\"$(detect_import_name || true)\"\nif [[ -z \"$IMPORT_NAME\" ]]; then\n echo \"WARN: Could not determine import name; the pkg stage will fall back to local detection.\"\nfi\n\n\n# Move into the directory that contains asv.*.json\ncd_asv_json_dir || { echo \"No 'asv.*.json' file found.\" >&2; exit 1; }\n\nCONF_NAME=\"$(asv_conf_name || true)\"\nif [[ -z \"${CONF_NAME:-}\" ]]; then\n echo \"No 'asv.*.json' file found.\" >&2\n exit 1\nfi\n\n# Make sure tomli is available in base for pyproject parsing\nmicromamba install -y -n base -c conda-forge tomli >/dev/null\n\n# Read python versions from the ASV config\nPY_VERSIONS=$(python - <=0.29.13\" \"numpy>=1.16.0\" \"biopython>=1.71\" \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y 
--no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='1cfe404e5d2c2a807162d4e3d440b6969e14d87b', commit_date=1732305078.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build and test dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.36\" \"numpy>=1.21.0\" pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython tidynamics\n \n # Ensure build system requirements are met\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel\n \n # Look for package in subdirectories\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Build and install MDAnalysis in development mode\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .[test,analysis]\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .[test,analysis]\n else\n echo \"Neither pyproject.toml nor setup.py found. 
Cannot install package.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='1eca65591fa402584dd29b6d1a02111af30e68eb', commit_date=1691706686.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install additional required packages\n cd ${ROOT_PATH}\n if [ -f \"package/requirements.txt\" ]; then\n # 
Install requirements one by one to better handle failures\n while IFS= read -r requirement || [[ -n \"$requirement\" ]]; do\n # Skip empty lines and comments\n [[ -z \"$requirement\" || \"$requirement\" =~ ^#.*$ ]] && continue\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < package/requirements.txt\n fi\n \n # Build and install MDAnalysis\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: 
\"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run 
--show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='264d6f9357a978444baa1f99411a03453664ab2b', commit_date=1672867721.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or 
setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='26880f0f0a4bb831fca9668650400858c34f442b', commit_date=1602889606.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='2ee4e9da5aa3a2c1b21fc3d1897bd70e0ab2064d', commit_date=1602770152.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis with optimized flags\n export 
CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='35d9d2e3ab08e7e6741b57fe02a7215fe3b91a6c', commit_date=1742597504.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies and tools\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install networkx matplotlib seaborn netCDF4 mmtf-python gsd biopython parmed griddataformats joblib 
threadpoolctl scikit-learn hypothesis codecov\n \n # Install MDAnalysis in editable mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='392c8ae5391e20f5e496f7ac03dae08c44deca3b', commit_date=1646727863.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Check for pyproject.toml or setup.py and install accordingly\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='39b0e4cc184725cd0e5e710780c8154ed4de9f4f', commit_date=1524419705.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools>=45.0\" wheel \"cython>=0.29\" numpy\n \n # Install MDAnalysis in editable mode with specific dependencies for this older version\n micromamba run -n \"asv_${version}\" pip install --no-deps --verbose 
--editable \"${ROOT_PATH}\"\n \n # Install runtime dependencies appropriate for the 2018 version\n micromamba run -n \"asv_${version}\" pip install six mmtf-python mock biopython networkx gsd scipy matplotlib\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='404040598f78db05882fa5b2bba1d35fc6a30510', commit_date=1605754667.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='42c541771ab7aee318783d296caa3e10b33f53eb', commit_date=1613225552.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13,<3.0\" \"numpy>=1.16.0\" \"setuptools>=40.8.0\" wheel\n \n # Additional dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd matplotlib netcdf4 
networkx\n \n # Build and install MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='4365f3b07c1bf2ebcf16424b26162102954c5b90', commit_date=1591777205.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='45e56e8314c278e3eb98ed7a6029b74e7435e8be', commit_date=1598362533.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd 
${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='474be5bbe32270bb9ddf02dc3cab74d3c1312c5e', commit_date=1728274662.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n \n # Check if we're in the right directory and if setup.py exists\n cd ${ROOT_PATH}\n if [ ! 
-f \"setup.py\" ] && [ ! -f \"pyproject.toml\" ]; then\n # Try to find the package directory\n if [ -d \"package\" ]; then\n cd package\n elif [ -d \"mdanalysis\" ]; then\n cd mdanalysis\n fi\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='481e36a3aa8767c4b895eabfd7ef8b89132ab611', commit_date=1723835551.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx tidynamics biopython\n \n # Check if we're in the right directory and if setup.py exists\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd \"${ROOT_PATH}/package\" || exit 1\n fi\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='4fafd51de84d5b89be0559a412acefde0040847c', commit_date=1726273184.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip 
install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='52b3d261240efed0546d9f15ee42c7f445e72c13', commit_date=1693261706.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with version constraints for Python 3.8\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython\n \n # Install additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install pytest pytest-xdist sphinx 
sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock biopython networkx gsd\n \n # Install package in development mode\n cd \"${ROOT_PATH}\"\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found. Attempting direct install.\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-deps --no-build-isolation .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='559528f3349bebcaeb82e7f97fd6b76ae8aecce2', commit_date=1501861121.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install numpy and other build dependencies separately to handle version constraints\n if [[ \"$version\" == \"2.7\" ]]; then\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<1.17\" \"scipy<1.3\" \"cython<3.0\" pytest setuptools\n else\n micromamba install -y -n \"asv_${version}\" -c conda-forge \"numpy<2.0\" 
\"scipy<2.0\" \"cython<3.0\" pytest setuptools\n fi\n \n # Install compilers and build tools\n micromamba install -y -n \"asv_${version}\" -c conda-forge compilers wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='mdanalysis', repo='mdanalysis', sha='5948963e0e9d92c9ddd0829ba3df3d9d496bbf01', commit_date=1672872621.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install --upgrade \"pip<24.0\" setuptools wheel\n micromamba run -n \"asv_${version}\" pip 
install \"cython<3.0\" \"numpy<2.0\"\n \n # Install package requirements from package directory\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install with relaxed constraints and ignore errors\n sed 's/>=/~=/g' ${ROOT_PATH}/package/requirements.txt | grep -v \"numpy\" | micromamba run -n \"asv_${version}\" pip install -r /dev/stdin || true\n fi\n \n # Install additional dependencies needed for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"biopython~=1.80\" fasteners griddataformats\n \n # Install the package in development mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n else\n echo \"No pyproject.toml or setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN 
/workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='59f4e395178240d5e3f36088d7a4d98ddd0e3607', commit_date=1680135568.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx-rtd-theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest 
\\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5bf1979b36cd4d5f55d691e6927aa606fbeb8791', commit_date=1703619619.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install \"numpy>=1.21.0\" \"cython>=0.29.32\" \"mmtf-python>=1.0.0\" gsd biopython scipy pytest\n\n # Look for package directory containing setup.py\n cd \"${ROOT_PATH}\"\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n fi\n\n # Set environment variables to help with compilation\n export CFLAGS=\"-std=c99 -O3 -funroll-loops -fsigned-zeros\"\n export NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION\n\n # Build and install MDAnalysis with specific build settings\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -v -e .\n 
else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5c19974c43125c94f98ab45d2f9965c70e427eec', commit_date=1541518721.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies first\n micromamba run -n \"asv_${version}\" pip install --no-deps \"cython>=0.16\" numpy>=1.10.0 biopython>=1.71 networkx>=1.0 griddataformats>=0.4.0 six>=1.4.0 fasteners mmtf-python>=1.0.0 tqdm>=4.43.0 packaging>=20.0 pytest>=3.3.0 mock\n \n # Build and install MDAnalysis 
in development mode with specific numpy version constraint\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"numpy>=1.16.5,<2.0\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='5cf8c5599e1a27c53e774c436b4e03fe71080f7a', commit_date=1534279531.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional dependencies required by MDAnalysis\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python tqdm mock joblib\n\n # Build and install MDAnalysis with appropriate flags\n if [[ \"$version\" == \"2.7\" ]]; then\n # For Python 
2.7, use a more conservative installation approach\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable ${ROOT_PATH}\n else\n # For Python 3.x, use build isolation disabled for better compatibility\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='61e236d45c52030d74ba6277c0a59e8a43a13ea9', commit_date=1593710203.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with pinned versions appropriate for 2020\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29,<0.30\" \"numpy>=1.13.0,<1.19\" \"biopython>=1.71,<1.78\" \\\n \"networkx>=1.0,<2.5\" \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" 
\"scipy>=1.0.0,<1.6\" \\\n \"joblib<1.0\" \"mock\" \"psutil<5.8\" \"pytest<6.0\" \"pytest-cov\" \"pytest-xdist<2.0\" \"hypothesis<6.0\"\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='62c35d49bd9458f2b5057d28d4904391a4a38513', commit_date=1534780584.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
\"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='6bc52ec2f0744cdf3c63a2e43aff232381ec4dd1', commit_date=1669766518.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n\n # Ensure we're in the package directory\n cd ${ROOT_PATH}/package || cd ${ROOT_PATH}\n\n # Try to build and install MDAnalysis\n if [ -f \"setup.py\" ]; then\n micromamba run -n 
\"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither setup.py nor pyproject.toml found in current directory\"\n exit 1\n fi\n\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='6d5ef34292899958ea2a0148388ecc47cf499da1', commit_date=1620729923.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='73cd1e69be88f1b47b1327c1918c0ad326bec302', commit_date=1603501474.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\"\n \n # Install build dependencies \n micromamba install -y -n \"asv_${version}\" -c conda-forge numpy scipy cython pytest compilers setuptools pip wheel\n \n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --no-deps cython numpy pytest sphinx sphinx-sitemap 
sphinx-rtd-theme\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='740cae26820eba538f9990ec904adc9f39a65b2e', commit_date=1619881090.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Install MDAnalysis in development mode with test dependencies\n cd 
${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py clean\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='740e74e8c61ea01a4b2120bd369b11a58cb9c304', commit_date=1728331627.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create base environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-cache-dir \\\n cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme \\\n mmtf-python mock gsd griddataformats tidynamics \\\n setuptools wheel build\n\n # Look for package subdirectories\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n\n # Try to build and install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found in current directory\"\n exit 1\n fi\n cd ${ROOT_PATH}/benchmarks\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='79dead30cc19cd821617a6746663a68709b276e0', commit_date=1754497815.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install meson-python ninja cython packaging\n # Build and install MDAnalysis with meson\n cd ${ROOT_PATH}\n # Ensure we're in the package directory with pyproject.toml\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n fi\n micromamba run -n \"asv_${version}\" 
python -m pip install --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='7c468a46344d17f91d44059332fcc533dad01cde', commit_date=1567026117.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython networkx matplotlib gsd griddataformats tidynamics\n \n # Install package in development mode with explicit build step\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='81b8ef51e5bc1aa2824294ac6c52818c74975658', commit_date=1741727282.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python mock gsd griddataformats scipy matplotlib biopython 
networkx tidynamics\n\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n elif [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"No setup files found in expected locations. Please check repository structure.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: 
\"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', 
repo='mdanalysis', sha='84ee67b99fc3bf165d2f58057fac3315d8bb33af', commit_date=1727431157.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run 
-n \"asv_${version}\" pip install packaging\n \n # Try to find and build from package directory\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n fi\n else\n echo \"Package directory not found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', 
sha='8599e47b77a89486a1ffe97a3f146751611d9595', commit_date=1680132537.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation 
--editable ${ROOT_PATH}/package/\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}/testsuite/\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='8c3577f5a72bee654d94367e4bef51791ffa5d0b', commit_date=1591177328.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install package dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six gsd mmtf-python networkx matplotlib biopython griddataformats GridDataFormats scipy tqdm joblib mock\n\n # 
Install package in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='93c4a97761469a2fd013c280d04435ae178f2c44', commit_date=1693273052.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n # Skip Python versions < 3.9 as MDAnalysis requires Python 3.9+\n if [[ $(echo \"$version\" | cut -d. -f2) -lt 9 ]]; then\n echo \"Skipping Python $version as MDAnalysis requires Python 3.9+\"\n continue\n fi\n\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip 
install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging mmtf-python gsd networkx matplotlib tqdm scipy biopython\n\n # Ensure we're in the root directory\n cd ${ROOT_PATH}\n \n # Look for package subdirectory\n if [ -d \"package\" ]; then\n cd package\n fi\n \n # Try to build and install the package\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n else\n echo \"No pyproject.toml or setup.py found in current directory\"\n exit 1\n fi\n \n # Install remaining dependencies\n micromamba run -n \"asv_${version}\" pip install -e .[test,doc]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT 
[\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n 
\"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='95fedb590d1afd268c0a643302cd703b8756f5d3', commit_date=1685194826.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Check Python version requirement (MDAnalysis needs >=3.9)\n if (( $(echo \"$version\" | cut -d. 
-f1,2 | sed 's/\\.//' | bc) < 39 )); then\n echo \"Skipping Python $version as MDAnalysis requires Python >=3.9\"\n continue\n fi\n\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge \\\n python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \\\n numpy scipy cython pytest compilers \\\n gsd networkx matplotlib tqdm pandas\n\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n cd ${ROOT_PATH}\n\n # Install build dependencies and package\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e \".[test,analysis]\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo 
pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os 
\"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='9a2cd43fccd4426f91b195ea9902e5b78a6c2e3b', commit_date=1710090427.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config 
$CONF_NAME\n \n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel matplotlib pandas\n micromamba run -n \"asv_${version}\" pip install GridDataFormats mmtf-python networkx fasteners mda-xdrlib waterdynamics pathsimanalysis mdahole2\n \n # Install the package in editable mode\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to 
set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n 
\"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='9ba1ab964920acfc986d8e264f78c965e062e9d0', commit_date=1511010257.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with basic dependencies\n # Using older versions since this is a 2017 commit\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<=1.13\" \"scipy<=1.0\" \"cython<=0.27\" setuptools wheel pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv 
machine --yes --config $CONF_NAME\n \n # Install additional build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"sphinx<1.7\" \"sphinx_rtd_theme<0.3\"\n \n cd ${ROOT_PATH}\n # Build and install MDAnalysis with optimizations disabled to avoid timeouts\n CFLAGS=\"-O0\" CXXFLAGS=\"-O0\" micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a1bca526f473325f91c12fb15c887243a2a9244b', 
commit_date=1646736472.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n # Build and install MDAnalysis in development mode with older Cython version\n cd ${ROOT_PATH}\n # First install the core package\n micromamba run -n 
\"asv_${version}\" pip install --no-build-isolation --editable package/\n # Then install the test suite\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable testsuite/\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a3672f216aa162f2549d1712fad0118b2cc98d49', commit_date=1734398599.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install gsd 
mmtf-python networkx scipy tqdm packaging matplotlib biopython griddataformats\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -d \"package\" ]; then\n cd package\n fi\n \n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\n else\n echo \"Neither pyproject.toml nor setup.py found in the root directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', 
repo='mdanalysis', sha='a6034750dc47c8904a297efa184292c73c0690bb', commit_date=1692115614.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy \"packaging<22\" pytest\n \n # Install package in development 
mode\n if [ -f \"package/setup.py\" ]; then\n cd package\n fi\n \n # Try to build and install\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n \n # Return to root directory\n cd ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a64eed98b38307e4699b59eef9f265cbead37ad6', commit_date=1607980019.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Install MDAnalysis in development mode with explicit numpy 
dependency\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --editable .\n micromamba run -n \"asv_${version}\" pip install numpy scipy\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='a6edec02af44fbb4589ef1da25a54a4cc8895ee4', commit_date=1671201733.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge gsd netcdf4 bzip2 mmtf-python\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install pytest-xdist 
sphinx sphinx_rtd_theme\n\n # Ensure we're in the root directory before building\n cd \"${ROOT_PATH}\"\n\n # Build and install MDAnalysis with optimizations\n export CFLAGS=\"-O3\"\n export CXXFLAGS=\"-O3\"\n \n # First try pyproject.toml-based install\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n # Fallback to setup.py if exists\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" python setup.py develop\n else\n echo \"Neither pyproject.toml nor setup.py found. Checking package subdirectories...\"\n # Check for package subdirectories\n if [ -f \"package/pyproject.toml\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v -e .\n cd \"${ROOT_PATH}\"\n elif [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" python setup.py develop\n cd \"${ROOT_PATH}\"\n else\n echo \"No installation method found. Build failed.\"\n exit 1\n fi\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY 
docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='aaa4456db50e237cf580c8c986c00d7c5fbe3075', commit_date=1703622753.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n micromamba run -n \"asv_${version}\" pip install mmtf-python mock gsd networkx biopython\n\n # Navigate to package directory if needed\n if [ ! 
-f \"${ROOT_PATH}/setup.py\" ]; then\n cd ${ROOT_PATH}/package || exit 1\n fi\n \n # Build and install MDAnalysis with specific compiler flags and additional dependencies\n export CFLAGS=\"-DXDR_GETPOS_RETURNS_UINT32=1 -DXDR_SETPOS_RETURNS_INT=1\"\n # Try installing with conda-forge compilers first\n micromamba install -y -n \"asv_${version}\" -c conda-forge gcc gxx\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b3208b39aab61be53f8b610f1fef628f83262205', 
commit_date=1725909222.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip wheel setuptools build\n micromamba run -n \"asv_${version}\" pip install numpy cython pytest pytest-xdist sphinx sphinx_rtd_theme\n # Try to find and use setup.py in package subdirectories\n cd ${ROOT_PATH}\n if [ -d 
\"package\" ]; then\n cd package\n fi\n # Install MDAnalysis in development mode with verbose output\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b5ba8278b3e09b80109aa06f77832be00f8752f0', commit_date=1510724778.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # For older versions of MDAnalysis, build_ext is needed before 
install\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b672b595b57f6862d486391d646cf30c31fd8501', commit_date=1598490143.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis build dependencies\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy\n \n # Build and install MDAnalysis with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python 
setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='b7f36bd148f1eed47f2dc935b89d28c8cae468c4', commit_date=1541446943.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install build dependencies first\n if [ -f \"${ROOT_PATH}/requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n fi\n \n # 
Build and install MDAnalysis with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='bc95e31af1bd1a583161318ab381d005452d48ea', commit_date=1611524871.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='bdb1352f4743aa2101ba2d6b3c9c4fbeb5ae8584', commit_date=1680212962.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"numpy<2.0.0\" cython pytest pytest-xdist sphinx sphinx-sitemap sphinx_rtd_theme\n # Install optional dependencies that may be needed for benchmarks\n micromamba run -n \"asv_${version}\" pip install gsd parmed biopython networkx 
tidynamics\n # Install MDAnalysis in editable mode with all dependencies\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .[test,analysis]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='be4b6ee8fa243a0d9e18b936a3d018f2b7418914', commit_date=1650356257.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies with specific versions to avoid Cython errors\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython==0.29.36\" \"numpy<2.0.0\"\n\n # Install required dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n 
\"mmtf-python>=1.0.0\" \\\n \"networkx>=2.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.5.0\" \\\n \"biopython>=1.80\" \\\n \"griddataformats>=0.4.0\" \\\n \"packaging\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\"\n\n # Build and install MDAnalysis\n cd \"${ROOT_PATH}\"\n if [ -f \"package/setup.py\" ]; then\n cd package\n # Use --no-build-isolation to ensure our carefully installed dependencies are used\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps --editable .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: 
\"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba 
run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c07b5c8897688d778e57e1ef34be86f58c969fe7', commit_date=1607478583.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba 
run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis components separately\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\n \n cd ${ROOT_PATH}/testsuite\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', 
repo='mdanalysis', sha='c3289d8994936ce7dbe7842e8877d597ca96360a', commit_date=1752273263.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy\n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}/package\n micromamba run -n \"asv_${version}\" pip install 
--no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c565b9d3a11508604a1217e37199ac17a8c618f2', commit_date=1654106359.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install mmtf-python gsd networkx biopython matplotlib tqdm pandas tidynamics\n\n # 
Build and install MDAnalysis from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n # Use build system if pyproject.toml exists\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n # Use setup.py if available\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py or pyproject.toml found in package root directory.\"\n # Try looking in package subdirectory\n if [ -d \"package\" ] && [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Could not find build configuration. Cannot build package.\"\n exit 1\n fi\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to 
set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c56e8df543e1aba21959a7c7b3029eacd57d9130', commit_date=1661799771.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy<2.0.0\" scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n\n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n 
\"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3,<4.0.0\" \\\n \"mock>=3.0.5\" \\\n \"packaging>=20.4\" \\\n \"pytest-xdist>=1.31.0\" \\\n \"pytest-cov>=2.10.1\" \\\n \"pytest-timeout>=1.4.2\" \\\n \"hypothesis>=5.19.0\" \\\n \"psutil>=4.3.1\" \\\n \"biopython>=1.80\" \\\n \"duecredit>=0.9.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"tqdm>=4.43.0\" \\\n \"joblib>=0.12\" \\\n \"fasteners>=0.15\" \\\n \"networkx>=2.0\" \\\n \"threadpoolctl>=2.0.0\"\n\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Build and install MDAnalysis in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set 
ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" 
asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c5cbc2551c1175e8d13887783c7ab2894607ac92', commit_date=1671293813.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with required dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n 
\"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme\n \n # Additional dependencies that might be needed for compilation\n micromamba install -y -n \"asv_${version}\" -c conda-forge gsd netcdf4 bzip2 gcc gxx\n \n # Install MDAnalysis with verbose output and no build isolation\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', 
sha='c620b141f018628356bb9cdd16eefa640b6080ba', commit_date=1671200774.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n \"numpy<2.0\" \\\n \"cython<3.0\" 
\\\n setuptools \\\n wheel \\\n pip \\\n build\n\n # Try building and installing from source\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" python -m build\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"Neither pyproject.toml nor setup.py found in ${ROOT_PATH}\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c6f1a5a5663913f00cc5f727ad0e662bbf23f18f', commit_date=1617010037.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install \"cython<3.0\" \"numpy<2.0\" setuptools wheel\n \n # Build 
and install MDAnalysis in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c70504d99e8b6ff7f61778cff1f5956da708ddad', commit_date=1619628547.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
\"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='c815614b5ae8ed86eaa0d68e10451fde7e72242b', commit_date=1671293292.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies with compatible versions\n micromamba run -n \"asv_${version}\" pip install \"setuptools<75.0.0\" wheel \"cython<3.0.0\" \"numpy<2.0.0\"\n \n # Install package requirements\n if [ -f \"${ROOT_PATH}/package/requirements.txt\" ]; then\n # Install requirements one by one to handle 
dependencies better\n while IFS= read -r requirement; do\n micromamba run -n \"asv_${version}\" pip install --no-deps \"$requirement\" || true\n done < \"${ROOT_PATH}/package/requirements.txt\"\n fi\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n if [ -f \"pyproject.toml\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --no-deps -e .\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n else\n echo \"No pyproject.toml or setup.py found\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set 
ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', 
repo='mdanalysis', sha='cb05695ca422c216406a0eae4040c782a2a03812', commit_date=1629822068.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel cython numpy scipy\n \n # Install optional dependencies that might be needed for MDAnalysis\n micromamba run -n 
\"asv_${version}\" pip install --no-deps matplotlib networkx gsd biopython\n \n # Install the package in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='d2e22ffb0cb46af5266e39b940d7f00c1ca293c1', commit_date=1534167809.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python 
setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='d73b653f19e8446bbb9de51bb41d71f78d148d30', commit_date=1534803427.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install numpy cython\n \n # Build and install MDAnalysis with test dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='da77f2bead8c4a634d2ba5b61cd7d7f841c01c0b', commit_date=1671205345.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install build setuptools wheel cython numpy pytest pytest-xdist\n # Build and install MDAnalysis using setup.py since pyproject.toml is not found\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='daee516f23ead8e42c2e42b7636f9ec243ab306e', commit_date=1603119467.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest sphinx sphinx-sitemap sphinx-rtd-theme\n \n # Build and install MDAnalysis in development mode\n cd 
${ROOT_PATH}\n if [ -d \"package\" ]; then\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./package\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable ./testsuite\n else\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='dcfa60a2ee0bcee7f54e969666950941905d825a', commit_date=1621773545.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e213f2be8e8741efc7cdddd35dc4bd2d88e0ff85', commit_date=1745000938.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install build wheel setuptools cython numpy\n \n # Install package-specific dependencies\n cd ${ROOT_PATH}/package\n if [ -f \"requirements.txt\" ]; then\n micromamba run -n \"asv_${version}\" pip install -r requirements.txt\n fi\n \n # Build and install package\n cd ${ROOT_PATH}\n 
if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in package directory\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e3966303776577e15a043daeceff5a591370398a', commit_date=1534255980.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n \n # Install additional build dependencies\n micromamba run -n \"asv_${version}\" pip install six biopython networkx gsd griddataformats mmtf-python mock joblib\n \n # Install in editable mode with 
test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='e8fbd529fc55cb187d38bdef141d74757f22bdc5', commit_date=1594518308.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # MDAnalysis specific build dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps -r ${ROOT_PATH}/requirements.txt\n # Install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}[test]\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='eab18cb8418ddb1dd72b44f474833de4a2999884', commit_date=1654100638.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n\n # Install build dependencies and required packages\n micromamba run -n \"asv_${version}\" micromamba install -y -c conda-forge \\\n biopython \\\n gsd \\\n griddataformats \\\n fasteners \\\n mmtf-python \\\n networkx \\\n scipy \\\n matplotlib \\\n tqdm \\\n \"cython<3.0\" \\\n \"numpy<2.0\" \\\n pip 
\\\n setuptools \\\n wheel \\\n build\n\n # Try building and installing with specific C compiler flags\n cd ${ROOT_PATH}\n export CFLAGS=\"-fcommon\"\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -v .\n\n # If that fails, try alternative installation method\n if [ $? -ne 0 ]; then\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='eae5845cf5488ae1db1cdcc2075f68406291721e', 
commit_date=1517964764.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps six mmtf-python mock biopython networkx gsd joblib setuptools wheel\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n 
\"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='ee4759293e1a4a5109c6b66e133acb1af7d24b0d', commit_date=1567703043.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest\n \n # Install MDAnalysis in development mode with specific build settings\n cd ${ROOT_PATH}\n micromamba run -n 
\"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='f5e9603f35b1e1587c1a1583793374fbfa0f80c5', commit_date=1629232880.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install \\\n \"cython>=0.29.13\" \\\n \"numpy>=1.16.0\" \\\n \"biopython>=1.74\" \\\n \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \\\n \"mmtf-python>=1.0.0\" \\\n \"gsd>=1.9.3\" \\\n \"scipy>=1.0.0\" \\\n \"joblib\" \\\n 
\"mock\" \\\n \"packaging\" \\\n \"pytest\" \\\n \"pytest-xdist\" \\\n \"pytest-cov\" \\\n \"pytest-timeout\" \\\n \"psutil\" \\\n \"hypothesis\" \\\n \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \\\n \"fasteners\" \\\n \"duecredit\"\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='f7a6f47e9c8c4637770c2c0cc0c20da841d11622', commit_date=1516881817.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers setuptools pip wheel\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps biopython griddataformats gsd networkx matplotlib mmtf-python tqdm tidynamics six\n \n # Build and install MDAnalysis in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" 
python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fb9e0bc786b21c15cefe0027fc83a441e1b19950', commit_date=1685186356.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n\n # Install ASV\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install build dependencies for MDAnalysis\n micromamba run -n \"asv_${version}\" pip install cython numpy pytest pytest-xdist sphinx sphinx_rtd_theme setuptools wheel build\n \n # Additional dependencies specific to MDAnalysis\n micromamba run -n \"asv_${version}\" pip install packaging 
mmtf-python gsd networkx matplotlib tqdm pandas biopython griddataformats scipy\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n \n # Build and install MDAnalysis\n if [ -f \"package/setup.py\" ]; then\n cd package\n micromamba run -n \"asv_${version}\" pip install -e .\n cd ..\n elif [ -f \"setup.py\" ]; then\n micromamba run -n \"asv_${version}\" pip install -e .\n else\n echo \"No setup.py found in expected locations\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n 
match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fe22dc3794f1f5d466f9128e4c7050fa0d58e62f', 
commit_date=1619962288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis-specific dependencies\n micromamba run -n \"asv_${version}\" pip install --no-deps gsd pmda gridDataFormats mmtf-python mock biopython networkx\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install cython numpy setuptools 
wheel\n \n # Build and install MDAnalysis in development mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='fed8be34a3434a621bacd438d2f9307139a24511', commit_date=1511384425.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Skip Python 2.7 as it's not available in conda-forge anymore\n if [[ \"$version\" == \"2.7\" ]]; then\n echo \"Skipping Python 2.7 as it's no longer supported\"\n continue\n fi\n\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" \"numpy>=1.16\" \"scipy>=1.5\" cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies for the 2017 commit\n micromamba run -n \"asv_${version}\" pip install 
\"cython>=0.16\" \"biopython>=1.71\" \\\n \"networkx>=1.0\" \"griddataformats>=0.4.0\" \"six>=1.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.4.0\" \\\n \"mock>=2.0.0\" \"psutil>=4.0.0\" \"fasteners>=0.12.0\" \"matplotlib>=1.5.1\" \\\n \"tqdm>=4.43.0\" \"packaging>=20.0\" \"pytest>=3.3.0\" \"pytest-xdist>=1.4.0\" \"pytest-cov>=2.5.1\"\n \n # Build and install MDAnalysis\n cd ${ROOT_PATH}\n # Use older build approach appropriate for 2017 commit\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation -e .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need 
to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks 
complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha='ff7ffa10901e2df2be12c3d3dd78e4e0a262e90e', commit_date=1614816697.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n # Create environment with core dependencies\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n \n # Install MDAnalysis dependencies with specific version constraints\n micromamba run -n \"asv_${version}\" pip install \"cython>=0.29.13\" \"numpy>=1.16.0\" 
\"biopython>=1.74\" \"networkx>=1.0\" \\\n \"griddataformats>=0.4.0\" \"mmtf-python>=1.0.0\" \"gsd>=1.9.3\" \"scipy>=1.0.0\" \"joblib\" \"mock\" \"packaging\" \\\n \"pytest\" \"pytest-xdist\" \"pytest-cov\" \"pytest-timeout\" \"psutil\" \"hypothesis\"\n \n # Build and install MDAnalysis in development mode with explicit numpy requirement\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='mdanalysis', repo='mdanalysis', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy \"cython<3\" joblib threadpoolctl pytest compilers meson-python\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # if maintainer/install_all.sh exists run it with develop\n if [[ -f \"maintainer/install_all.sh\" ]]; then\n micromamba activate \"asv_${version}\"\n working_dir=$(pwd)\n cd \"$ROOT_PATH\" || exit 1\n bash maintainer/install_all.sh develop\n cd \"$working_dir\" || exit 1\n else\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n fi\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='modin-project', repo='modin', sha='be3e716107a185961fc209c343b0feefe0fb9751', commit_date=1684841207.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge 
\\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='modin-project', repo='modin', sha='c5aac3ef99d14305ea9a130e14155fc37495e199', commit_date=1608304159.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='napari', repo='napari', sha='3b6800763f97452ccf8230abf5a65fd6beedd247', commit_date=1606539287.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='napari', repo='napari', sha='dfeefb43af6538dd1e5ad7820128dfc844dc54b1', commit_date=1723973799.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='networkx', repo='networkx', sha='1071e14b81baaa4f0becc1849e85839ae8c671d9', commit_date=1716269137.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='networkx', repo='networkx', sha='81df24ce59b5b4fddfa65cd0a57db96748bba904', commit_date=1745208237.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM 
buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='newton-physics', repo='newton', sha='5b18850fd8243e4c707b596880c01c1966e5168e', commit_date=1753825967.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='newton-physics', repo='newton', sha='cd07ab2c989df6392253a77e82333ec57a433e94', commit_date=1751556054.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nilearn', repo='nilearn', sha='6c1a76e37cf1c0dd6b800271cb3994f3efd38d07', commit_date=1744125996.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nilearn', repo='nilearn', sha='73fe9520ea705056f89b1cd5982947de13d515a0', commit_date=1754650581.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG 
REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='numpy', repo='numpy', sha='4092a9e160cc247a4a45724579a0c829733688ca', commit_date=1459109632.0)": { - "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='numpy', repo='numpy', sha='9c3f0bb9955d530d43487f2ab800c765c83a3ea7', commit_date=1716460609.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='5495dc762dae2f09b648588d0f979e03ea3ef88b', commit_date=1741386626.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --verbose --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='626fc946dcfe2150b6aed956c57e89ec907ca44a', commit_date=1746035128.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install setuptools wheel\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Build and install WARP with CUDA support\n CUDA_PATH=/usr/local/cuda micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='9958a89058d16e7ac634c46b37d9aad6c14b3f10', commit_date=1740864850.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # WARP specific build requirements\n micromamba run -n \"asv_${version}\" pip install torch cuda-python\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" python build_lib.py\n # Now install in editable mode\n WARP_BUILD_CUDA=1 micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV 
MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='a447d70c372b4dbe1b574ebf587c51c9742272db', commit_date=1748714623.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy pytest ninja cmake compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific build steps\n micromamba run -n \"asv_${version}\" pip install warp-lang\n # Run build_lib.py first as required by the error message\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Now try the editable install\n micromamba run -n \"asv_${version}\" pip install --no-deps --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 
\\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='a81f7e773f2905e06fe52262002c2e34a5daa4d8', commit_date=1743362346.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy cmake ninja pytest\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install --upgrade pip setuptools wheel\n # Run build_lib.py first as required by the error message\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='d641e89a288746c380ef9b4871f45b0d862fd69e', commit_date=1755703901.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython pytest ninja cmake\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n # WARP specific dependencies and build requirements\n micromamba run -n \"asv_${version}\" pip install meson-python build wheel setuptools\n # First run build_lib.py to generate required libraries\n cd ${ROOT_PATH}\n # Add missing climits header to fix build error\n sed -i '1i\\#include ' warp/native/bvh.cpp\n micromamba run -n \"asv_${version}\" python build_lib.py\n # Then install WARP without CUDA support since error suggests basic build issues first\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable .\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar 
-xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha='dc693d89d5b85ac7e72c7f4e226eb58a5d54131f', commit_date=1751384285.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Ensure the necessary libraries are built before attempting to install the package\n micromamba run -n \"asv_${version}\" python ${ROOT_PATH}/build_lib.py\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='nvidia', repo='warp', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# only run the below if condition if bvh.cpp is present\ngrep -q '^#include ' \"${ROOT_PATH}/warp/native/bvh.cpp\" || sed -i 's|#include |#include \n#include |' \"${ROOT_PATH}/warp/native/bvh.cpp\"\n\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/build_lib.py\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='optuna', repo='optuna', sha='445048a74c9090e60a82a49605044cc42727642a', commit_date=1650874136.0)": { - "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='optuna', repo='optuna', sha='c634449ebbd2160ee44a1845d1efd6c20ee200ae', commit_date=1714538588.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pandas-dev', repo='pandas', sha='2f4c93e8322775a0bb06429a02429b95ba6abb26', commit_date=1698253642.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pandas-dev', repo='pandas', sha='94a8af55b703fbaea19da9902a9790c7b93dc0ad', commit_date=1686591905.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='posthog', repo='posthog', sha='16075ff5c3671587db9e6a6a3ed396058d0f413b', commit_date=1733419912.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to fix the pyproject.toml issue by ensuring the 'version' field is present\n if ! 
grep -q \"version\" \"${ROOT_PATH}/pyproject.toml\"; then\n echo \"version = '0.1.0'\" >> \"${ROOT_PATH}/pyproject.toml\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='posthog', repo='posthog', sha='3578a0c1c2b6f4425dc0fddf31d3d256bbf3fc87', commit_date=1655908403.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pvlib', repo='pvlib-python', sha='3692427bef155a32eac525fe965ed8d407a7846e', commit_date=1660774705.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pvlib', repo='pvlib-python', sha='b8c56c5e725ed12f15342c5336f71d52ec8008ce', commit_date=1749300951.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pybamm-team', repo='pybamm', sha='b1fc5950f0d8e5c8e104e00573fdff5561818014', commit_date=1723152711.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pybamm-team', repo='pybamm', sha='e1f52ffcf9811bb7d5046af47c48a2291bfd50b8', commit_date=1653925577.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --constraint \"<3.10,>=3.7\"\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='bottleneck', sha='c5356daccdab4afc293f56d4b4ff47c154be5bcd', commit_date=1716493787.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='bottleneck', sha='dc01fad42713181b1f2bb13a965eb0651d1308b6', commit_date=1729241092.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --ignore-requires-python\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git 
asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='xarray', sha='4cbb7cbd86af1ccfe2b3b98f0e36a410f86d77ef', commit_date=1523669869.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydata', repo='xarray', sha='dd6222f01a476caa96630e26d5b02fad6777a886', commit_date=1747916222.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pydicom', sha='50cd981a068c74b01d854c6cac9bb897fe0b74a9', commit_date=1726970247.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n 
MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pydicom', sha='87266d96add6a6cccaa3032bbc96b0e3009c6dea', commit_date=1690047796.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install flit-core\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c 
conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pynetdicom', sha='1b701e898b489d561884d20ad78920607a6d1df0', commit_date=1563786471.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pydicom', repo='pynetdicom', sha='bb1f9d164d5c408fc28e02f924b3821b92cb45ad', commit_date=1555925288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pymc-devs', repo='pymc', sha='6360b005fc610d0505f84885743215a3e09f046e', commit_date=1614035911.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pymc-devs', repo='pymc', sha='a06081e1e9649bd56e3528cb96380efdf6bb2dc0', commit_date=1710322397.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pysal', repo='momepy', sha='6467ae26e8bfca9ba91e7795ab7899aaf89c576c', commit_date=1604013921.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pysal', repo='momepy', sha='7619f2f760d9027434369114a49150e3d3a483fb', commit_date=1603224289.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-adaptive', repo='adaptive', sha='50fae4341c53439f57fcea63346ba3581bd187d4', commit_date=1665457361.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-adaptive', repo='adaptive', sha='a9bb7f612717000dd2cf6899d8ebbf479807f6f5', commit_date=1550239213.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='0422c82a80b3ec0dc7fcbc69562f99e35358ee80', commit_date=1680293750.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='283f5e7480a7c39f0e11abe63e3c1ecd8b5d8911', commit_date=1616243491.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file to fix the invalid version error and setup.cfg\n mkdir -p \"${ROOT_PATH}/control\"\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n echo \"[metadata]\nversion = 0.0.0.dev0\" > \"${ROOT_PATH}/setup.cfg\"\n \n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install setuptools wheel build\n\n # Try installing in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" python -m build --wheel\n micromamba run -n \"asv_${version}\" pip install dist/*.whl\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: 
\"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu 
\"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='2ce4bbd983ce00aa2998bce00c7c161ff7c0f1d5', commit_date=1640530701.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Create version file since setup.py fails due to invalid version\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in development mode with all dependencies\n cd \"${ROOT_PATH}\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', 
sha='4ef15c4e95ec73cf5fc4d571be103e67b00caadf', commit_date=1647713524.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required build dependencies\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib setuptools wheel\n\n # Fix the version in setup.py before installing\n sed -i 's/version='\"'\"'dev'\"'\"'/version='\"'\"'0.0.0.dev0'\"'\"'/' \"${ROOT_PATH}/setup.py\"\n \n # Build and install in editable mode with test dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', 
sha='82f3fe343422289f076d6883a2448d169606f821', commit_date=1701474288.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='a042895507367a5d001af7d3febfd8f386497554', commit_date=1739343810.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='a111b03e651d7c1828d264c1b143d9ccc9030b3f', commit_date=1640969033.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='abeb0e46a3d56c98b4534f73202a5a7ef5a0af87', 
commit_date=1751727883.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='c3c659638fb22bde11e40868f80f540060c50b40', commit_date=1616196419.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install required dependencies for python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n\n # Generate version file since setup.py shows version error\n echo \"__version__ = '0.0.0.dev0'\" > \"${ROOT_PATH}/control/version.py\"\n \n # Install the package in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() 
{\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha='f7d18f17bf90bfb99a06648982b22d1e4af6ccd2', 
commit_date=1686374157.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. 
####\n # Install additional dependencies required by python-control\n micromamba run -n \"asv_${version}\" pip install numpy scipy matplotlib\n \n # Build and install in editable mode with all dependencies\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}[test]\"\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-control', repo='python-control', sha=None, commit_date=0.0)": { - "building_data": "#!/usr/bin/env 
bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n # if make_version exists run it\n if [[ -f \"${ROOT_PATH}/make_version.py\" ]]; then\n micromamba run -n \"asv_${version}\" python \"${ROOT_PATH}/make_version.py\"\n fi\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-hyper', repo='h11', sha='80805f06e5859692a9dcc32484b2745b7f215a8a', commit_date=1597311658.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n 
curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='python-hyper', repo='h11', sha='d64468627a4adeb4140e1480a836c85ba903a2c6', commit_date=1522821575.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pytroll', repo='satpy', sha='94fc4f7749bc2a27f76c7a16a7289037d41120f2', commit_date=1644305622.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pytroll', repo='satpy', sha='aa7f0dd616a973eb2de0e5b77a9ec51d08cc601c', commit_date=1659722497.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pywavelets', repo='pywt', sha='21a30d2af5aca2b3c5f827aa407cb549e2c99fb9', commit_date=1551150162.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} --use-pep517\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='pywavelets', repo='pywt', sha='74b44217a66199fa2e0f8e036955fc00f5cbc21a', commit_date=1708613848.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='qiskit', repo='qiskit', sha='023cbd4ec646fc81e0434b6de434bb477ad94979', commit_date=1755506488.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n if [[ \"$version\" == \"3.13\" ]]; then\n echo \"Skipping unsupported Python version: $version\"\n continue\n fi\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='qiskit', repo='qiskit', sha='b12e9ec3cff020983e3dde9b16f5ccc4fd0f4963', commit_date=1715792171.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython setuptools-rust\n micromamba run -n \"asv_${version}\" pip install rustup\n micromamba run -n \"asv_${version}\" rustup toolchain install stable\n micromamba run -n \"asv_${version}\" rustup default stable\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n 
MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='quantumlib', repo='cirq', sha='01ae51eebf3b18a5cbee9fc0c697d4e1511c07f2', commit_date=1640302944.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH} || {\n echo \"Editable install failed, attempting wheel install\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --no-use-pep517 ${ROOT_PATH}\n }\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='quantumlib', repo='cirq', sha='1a75d9faee3b78765bb4badcf73e3d3e72a3ca2a', commit_date=1744652301.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='rockhopper-technologies', repo='enlighten', sha='d239fa5496a6c342b85343d53a4c16d8db9a87a5', commit_date=1698502059.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean 
--all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-image', repo='scikit-image', sha='0ff35b21293405e9922e44b9dda3818db960b87e', commit_date=1674543103.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-image', repo='scikit-image', sha='c7479c1d7430020a9ee9d92f25a1f0c33e36a7c1', commit_date=1597584715.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='044f1b00a62c9083ce3212a3e69046c9afac0de6', commit_date=1662470783.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='04860335c82d557e663b4cfa218663d1c7bf65fd', commit_date=1689974588.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='05ce8141bc71ad21e55be4d1b3f6609f65e91e49', commit_date=1603277025.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='06e566eb86cfd8c6107cf3bc2b477c97b80002a3', commit_date=1705578508.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='08b6157b0e18480569a5cc08efd44dabad9e60ce', commit_date=1701071115.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='092caed407f3b60de7677d4353bfe0db20a2faab', commit_date=1682603301.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='0937b4ab48136eb161ead4abd4806d0708b1bb4c', commit_date=1607961058.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0a5af0d2a11c64d59381110f3967acbe7d88a031', commit_date=1599664355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0b45ac56f1c6acbd254f77fe562aa4919be6ca21', commit_date=1627651692.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='0c65bbfe8ce816a181780d2a249c94dd653e115a', commit_date=1642433763.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='0c74b8b7d5cdb60dc3a3240cdb36af40b9f40288', commit_date=1615733031.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='0de3b0d1eaacee9f7b15cabc05752cba945c7621', commit_date=1644500459.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1045d16ec13b1cab7878e7555538573d1884aad3', commit_date=1614793397.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='119b837a43d939ec02cf2aeba5bd203f8ebab4c7', commit_date=1649335379.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1415a2890b0451d80feef2d81e921a15d2b9d680', commit_date=1685431571.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='15cb8695a27eb8d4dc281ac3c937e12db8b5a6c1', commit_date=1604221237.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle the multiple packages error\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Create setup.cfg to explicitly specify packages\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\npackage_dir =\n = .\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='16625450b58f555dc3955d223f0c3b64a5686984', commit_date=1652277602.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
#### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='191f96908d6bbb46cf7293fb0ac1299f1e8b783d', commit_date=1719904631.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='1a78993217b52745d63a3495a819efd7f1b0530a', commit_date=1691676945.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='1bb0306a1309f9a57d8c652dec731a95cbd0052b', commit_date=1610422145.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to avoid package discovery issues\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='1d1aadd0711b87d2a11c80aad15df6f8cf156712', commit_date=1642210241.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='1db03ce68be362baa12330ae3f42b9673863fa52', commit_date=1626800410.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='1fbf5fb317034e604d7ae71f368cd9e5b236ec0c', commit_date=1694855355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='210740408a732940430047fe9437c2193735573f', commit_date=1719586131.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='226da0d7c458816776549c2580abaa4782dc4c48', commit_date=1637400914.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='28831879f2b5a8f623623735480399735c1bb742', commit_date=1755578702.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2b2e2903e5635dd93a741c955a87260fb69cfc3d', commit_date=1720704772.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2ca6d4d2fd53a53f92f8b220edee862553b76ffa', commit_date=1750250202.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='2e213c618841f3635885bab034606512c40a7fd4', commit_date=1646246849.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='320b4c61f97fec3facc3c4c2b4cf9351d3425b44', commit_date=1596283836.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='32c5d05cbd7551fd983a250945013239e0e5cb94', commit_date=1631705680.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3cdfb56d340e77c2ffb5ad341ec4abebd8094a25', commit_date=1608655766.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='3d0a98d7765ccaf5aede89d0ec1088fda24e0465', commit_date=1644832679.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='408f561b87f9955e92619cbf924d595a2655344f', commit_date=1678175921.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='432778464cbffc8ca675c1df786c31f8c23fc62c', commit_date=1642715056.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='45a817933ef51a24f0c5863c1026b4fe664b26fa', commit_date=1608647213.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Use setup.py directly to handle the multiple packages issue\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='4685cf624582cbc9a35d646f239347e54db798dc', commit_date=1652472968.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='49d26cb63fefe43c9b310136e4f2c172d8c433cb', commit_date=1599140563.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools==60.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and environment variables\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n\n # Ensure we're in the root directory\n cd \"${ROOT_PATH}\"\n\n # Install scikit-learn in development mode with specific build settings\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: 
\"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba 
run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='4bc61a09eac44a86758c6a02a2b47f912a696d3b', commit_date=1719575535.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='4e44edebf9e811c718c2842b65db2eb41ba01786', commit_date=1723709827.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='52d93e141a5d874bd288f15cc1d8990f09721aad', commit_date=1754304060.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='55a65a2fa5653257225d7e184da3d0c00ff852b1', commit_date=1695213631.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='584d413fec25fb5c38f06c1fe88e652111395330', commit_date=1675930888.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='598045569c8f96fb345059f5316ea8903d374ff4', commit_date=1615476313.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid compatibility issues\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set environment variables for build\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_SKIP_NETWORK_TESTS=1\n \n # Install scikit-learn in development mode with explicit package specification\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e . --config-settings=package=sklearn\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='5a332e77a10a44107276843d8532ef79f239c8f3', commit_date=1681854133.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5a850eb044ca07f1f3bcb1b284116d6f2d37df1b', commit_date=1657115862.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5b46d01f8d5015114644b91ce88ee4bc4fa5386d', commit_date=1680769691.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5c4e9a0fd82dd096bbdf78b69c264a741c768a86', commit_date=1690911539.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='5ffec3233034e0413f548380d4a22f4e0eecae94', commit_date=1678722797.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='63a1a31a17f9bd9cdf617b2cf04bfaf2f32f0a17', commit_date=1639082235.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='63ff30396a062a88387529a28bdb8a1675e9332e', commit_date=1678351529.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='64d54483edfa55ab44d836f9b08ff1bd38f7f6bb', commit_date=1627659978.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='6595229d116b128c5b36f204dc941f69e14abc7f', commit_date=1718288797.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='673f6259f3fb7bd2a057b1889e23b280fe638998', commit_date=1612389138.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='67ca4dda1d61c9ad95ed68b04cb40da2c822e960', commit_date=1678114713.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='687e84a126965b4179b02d86041a9e997eba87c9', commit_date=1751036214.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='691b00f4b7d169d38cc46cf14668a5029b2df8eb', commit_date=1728910531.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='6d7d0f275db08ca97e7ce9765e5e8f0604e490dd', commit_date=1641981733.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='6f91cbebe5c439d5712860315616b70cd2ca9f87', commit_date=1633437528.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='701537ecca85a333449814c82ac2b78db5f534a8', commit_date=1682379515.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='70ca21f106b603b611da73012c9ade7cd8e438b8', commit_date=1713791446.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='767e9ae7e4fec8bea36c0433ab42f500aacfde64', commit_date=1651223539.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='769da3d51feef52b97b8129bf4700cf088a247b2', commit_date=1613120619.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" wheel\n\n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py build_ext --inplace\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='76c28285d3d3eb6a2834b7d1db01e296187c60b8', commit_date=1677233852.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7798fd829d0eb3637da17cc5cb359bf52efa551f', commit_date=1630429058.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='7a2f5ca3a8478333f194a085b0c3635d75fcdf4d', commit_date=1678442780.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7aabe53e730947df0f6f1f85d640e6daea5bfc9f', commit_date=1634742992.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7af0a18996efb10fcbcdb15c7c132d2eb36be736', commit_date=1687508727.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7b715111bff01e836fcd3413851381c6a1057ca4', commit_date=1624465784.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7c835d550c1dcaf44938b1c285db017a773d7dba', commit_date=1662054353.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='7ddd6e5d34911346afe6839c16fc06fc820fc013', commit_date=1618947559.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='7f1d4d05064a160e19f786bfbac8996cf0ecac5d', commit_date=1707518612.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='80ebe21ec280892df98a02d8fdd61cbf3988ccd6', commit_date=1638310769.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='830864629e21509980a9c3904c9bb7bf2be8fec5', commit_date=1655213679.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8525ba5d3c3b5423a5599e654ce73b931882a434', commit_date=1754632277.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='863c552c448118249563f0e709ea83a1a9b2fc7f', commit_date=1612010007.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='871892cef9bc70224233fdf2140c896874c07b57', commit_date=1659000389.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='88c2db24bd3efb631372aa971270d6cb690d914d', commit_date=1726476355.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='897c0c570511be4b7912a335052ed479ac5ca1f3', commit_date=1705781316.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a71b840d3d7f6e5db9f9faf3b6c44f8ed6a3850', commit_date=1705345976.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='8a7684705f636a8dfcde8e2239d2e0bcd624ac54', commit_date=1647426404.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8ad7c3f02daae525ee83231fbd33fb65e8e05288', commit_date=1633621378.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8bc36080d9855d29e1fcbc86da46a9e89e86c046', commit_date=1622540296.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: 
\"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='8e64458f9f348885e6692639d7c8ebaf0adafca0', commit_date=1625562575.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='8eef0e767c4bdd2fdb83f51b162afa32386d5973', commit_date=1692883694.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='9590c07128d3bad5978f08eeb34613d347b96e38', commit_date=1719499549.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='961afc72e0222cb108b77b68c145ea4424f089da', commit_date=1751880029.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='99410b1bdea296a0df48026aaee85472bf3cb7cf', commit_date=1625818419.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SKLEARN_NO_OPENMP=1 # Disable OpenMP to avoid potential build issues\n\n # Install scikit-learn in editable mode with optimizations\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" python setup.py develop --no-deps\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set 
ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='99562100e941f0972a5a65484ff80f407eeb5137', commit_date=1674572593.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='9c9c8582dff9f4563aa130ef89f155bad0051493', commit_date=1668796144.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='9e38cd00d032f777312e639477f1f52f3ea4b3b7', commit_date=1705585714.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a09a62eda27720a0cb949ea24b1e21d358f95176', commit_date=1676040745.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a47d569e670fd4102af37c3165c9b1ddf6fd3005', commit_date=1652372475.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a67ebbebc173007735e62eef7878c08435d28d89', commit_date=1718987804.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='a7a416f74908cf890d1dd115b53f5811cb8e7598', commit_date=1680769655.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a85b14d4799ba7c4e13e0e942e599f8077dc182e', commit_date=1679350355.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='a8b1905e8f977fcd4d6a348678bb1e82ed9b3310', commit_date=1606807943.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='aa2131f9bdcfa7ff0dacfd6a47c207cbb68a49fa', commit_date=1751370298.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='abbeacc2daee2b213274924a5a4ffe6cbafb0627', commit_date=1651693256.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='aca8f20db461ca0dd70b02b6a1f41b957b2b12ee', commit_date=1665069106.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ad91259f20529306efe445f5a1da4dccc8c81b5a', commit_date=1663256210.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b1202af3b379e698539a2719f2b1e28706ce5388', commit_date=1638654791.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b3aea0053dadcb67adfc39a90c70ffca607a534f', commit_date=1643205359.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b5d55b4fd19ca97d68e4e34e5822865b0a8e90d2', commit_date=1651487470.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b609562c610822ad4b3c11a9e7a22710aba438af', commit_date=1637744681.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='b6b6f63ebefe16403d11e8a0a2281b6e2a811933', commit_date=1678791874.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='b8229daafee0e50690d4b8447f93cf1069ba6880', commit_date=1701274890.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='bc7cd3189bc817545791071515693445e1e271db', commit_date=1617352203.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='bf0886bae0ccbc8c5d285b6e2affe7e40474f970', commit_date=1619532370.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c0eb3d37244cc4bf35b82e18bff37320e198b038', commit_date=1670930060.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build settings\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c217527af5744b9d0db8761c1e3667552312e5e7', commit_date=1652946509.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c300a8f2178fcae847f82ad548fe9452f2ba8bbb', commit_date=1658415495.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c47205fb7d45de50de4afa9760d974e754f103e1', commit_date=1707735651.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c63b21ec309f742defd56033eadfc8f7bf5b510b', commit_date=1711607317.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c6ad7361c8fc68188b83070aa0b6b797058c06fa', commit_date=1646214356.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c9138537790cc0fa352968eed927433fe17ee17c', commit_date=1701967415.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='c9525d1600ecd526b9b98e275fc1b85782c25dea', commit_date=1634072165.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='c9f9b041758c3fa5fdf74b15995a3e3607b0ad5a', commit_date=1737104589.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='caaa1f52a0632294bf951a9283d015f7b5dd5dd5', commit_date=1732650609.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='cbe8648c33b94bd919c35f4d1e2ae1c4432d9749', commit_date=1748364732.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='d5901462551283b689284e582152666faf0dc1da', commit_date=1676911719.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d5df806c9715443f5dc7de9023a1b7aa2045eae4', commit_date=1677234005.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d85b1d3302a3ff45179a5826a747e8ee2562f143', commit_date=1674489554.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d8d5637cfe372dd353dfc9f79dbb63c3189a9ecc', commit_date=1644836117.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='d92c76986ac6553ce8e0fe2c1bbaea500c105cc7', commit_date=1679480310.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dc1ea2751e8f4e18f61c7e6d767cf42c6e636256', commit_date=1608485758.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to handle compatibility\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<61.0.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Create a temporary setup.cfg to handle package discovery\n cat > ${ROOT_PATH}/setup.cfg << EOF\n[options]\npackages = find:\n\n[options.packages.find]\ninclude = sklearn*\nexclude = build_tools*, maint_tools*, asv_benchmarks*\nEOF\n \n # Install scikit-learn in editable mode\n cd ${ROOT_PATH}\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation -e .\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set 
ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram 
\"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dc580a8ef5ee2a8aea80498388690e2213118efd', commit_date=1670501069.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='de67a4420f1713058070802ad593cbcd2ee2d5f3', commit_date=1677582108.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='df692c03c1a6003878c6fc4d2f9f222d304dcee3', commit_date=1649449476.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='dfaef0c6c3aef0d00c72573728c90c1d542e2957', commit_date=1657123469.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='dfda968f1d0b3b1ecaeb4125d3e903416eaf18ec', commit_date=1678100532.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e113897235feaf309eaaed24001ca96f3608602f', commit_date=1648574496.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags for potential warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Build and install scikit-learn in development mode with specific flags\n micromamba run -n \"asv_${version}\" pip install --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: 
\"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', 
repo='scikit-learn', sha='e1db2a8173ca37e561cdfa4384481501c4d50868', commit_date=1644639631.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e1e8c66e05dd638ae785855bfb637e0180aea99c', commit_date=1642748755.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e411c29625e66f7e440f1acce4069e01201cf122', commit_date=1672782103.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='e64714637d8cc9f4724ae21ea500e4bdc57b0a39', commit_date=1629207428.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='e7ae63f784c5f85af41cf8f346d194775f01f333', commit_date=1694440694.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='eae3f294d3ba8ae636730537faef4cdd612083ff', commit_date=1678119642.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eb2920766d7b2ffb04359a1dc8b6c611960931b7', commit_date=1725568507.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='eb85684feb0505694e66365ba9f4d10a409f8f0b', commit_date=1697017427.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ee524f455dbf0285f7b121a08f1e9613a518abcf', commit_date=1617906457.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='eecde00c7a706546271ff40d7d492b5f27046d2b', commit_date=1619516333.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ef200eb16813f4e579f3a4e6cd4603e16f72f5a8', commit_date=1680030341.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='ef82b778ecaeee11d6bfd005f59e882410d330b6', commit_date=1751882162.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f1d3417b086550be670cbfbb5b3c1760ac99203f', commit_date=1646068982.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f4ed8ef5e4498c9de2ff4b713c1695d6f312ffba', commit_date=1733748660.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f812e2a27619650463cb12d765f1b443b47c0828', commit_date=1628181136.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\"\n \n # Set compiler flags and build options\n export CFLAGS=\"-Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n \n # Install scikit-learn in editable mode with specific build configuration\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to 
set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - 
"Task(owner='scikit-learn', repo='scikit-learn', sha='f86f41d80bff882689fc16bd7da1fef4a805b464', commit_date=1695653805.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='f965fcc0634e47b7230e120850cf7bb4efeb96e7', commit_date=1674829022.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='f96ce586eecb361d53b192ea3b44098d1bd49a77', commit_date=1637843007.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with specific versions to avoid distutils deprecation\n micromamba run -n \"asv_${version}\" pip install -U \"setuptools<60.0\" \"cython<3\" \"numpy<2\" \"scipy<1.14\" pytest joblib threadpoolctl\n\n # Set compiler flags and build options\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n export SKLEARN_BUILD_PARALLEL=3\n export SETUPTOOLS_USE_DISTUTILS=stdlib\n\n # Install scikit-learn in editable mode with optimizations\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: 
\"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config 
$CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha='fba028b07ed2b4e52dd3719dad0d990837bde28c', commit_date=1733159260.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='fe08016877e8bd715816cf9fbfb1fb697c3446d2', commit_date=1754300286.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='fee76cc5405c01e283a3b079dcb865f3017d5007', commit_date=1705008338.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies with newer Cython as required by error message\n micromamba run -n \"asv_${version}\" pip install -U \"cython>=3.0.10\" \"numpy<2\" \"scipy<1.14\" \"setuptools>=60\" meson-python\n \n # Set compiler flags\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', 
sha='ff9344f3d8d11d38fa3a2497199113e5bac9537c', commit_date=1666642605.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n #### BUILD STEPS GO HERE. 
####\n # Install build dependencies for scikit-learn\n micromamba run -n \"asv_${version}\" pip install -U meson-python \"cython<3\" \"numpy<2\" \"setuptools==60\" \"scipy<1.14\"\n \n # Set compiler flags to handle warnings\n export CFLAGS=\"$CFLAGS -Wno-error=incompatible-pointer-types\"\n \n # Install scikit-learn in editable mode with optimized build flags\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. ####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set 
ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn', repo='scikit-learn', sha=None, 
commit_date=0.0)": { - "building_data": "#!/usr/bin/env bash\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\napt-get update && apt-get install -y ninja-build cmake\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n #### BUILD STEPS GO HERE. ####\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n #### BUILD STEPS END HERE. 
####\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='57086e91b65b88a95c89449aa501ff68a61dc39a', commit_date=1563459886.0)": { 
- "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends 
\\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scikit-learn-contrib', repo='metric-learn', sha='faa240fd7469176036a91430ae6a0a45e627c94a', commit_date=1531145592.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN 
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scipy', repo='scipy', sha='83dbd97a76af8621dd0228a797f5207bed094c23', commit_date=1679643125.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran pybind11\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge openblas\n micromamba run -n \"asv_${version}\" git submodule update --init\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n 
OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scipy', repo='scipy', sha='b919b4aa67a541b1fef91820a4e94156f7dd36d2', commit_date=1731196689.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython pythran\n micromamba run -n \"asv_${version}\" pip install pybind11\n micromamba run -n \"asv_${version}\" pip install openblas\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='cartopy', sha='9a4d894d9adab3b3a8d9cee6299581ba0ef9ec20', commit_date=1662748176.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='cartopy', sha='d9825f18dc6a70b5b4ef6bc5bf48d8025eef1e8e', commit_date=1581379933.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --upgrade setuptools\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p 
$MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='iris', sha='33deead5846b37019902ba067c87e710e55ff6e6', commit_date=1650551816.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scitools', repo='iris', sha='b2ce2a34e2eef7e3d6203c77ada7ed4ce89e3145', commit_date=1573652360.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='anndata', sha='2712af6efcf2d4356f4185a10e92328168710d9f', commit_date=1680623010.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython flit-core setuptools_scm\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n 
OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='anndata', sha='df213f659f0e9eadfcab4af48ee98de7145252a7', commit_date=1733842403.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='scanpy', sha='7f3f89ac02e924a3a6d55c31730cfaf23b0b4223', commit_date=1744636041.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='scverse', repo='scanpy', sha='ad657edfb52e9957b9a93b3a16fc8a87852f3f09', commit_date=1718709475.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='shapely', repo='shapely', sha='3c3a83986ac5bf434e0ca6b7bd16571a1ddac0a4', commit_date=1696785164.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers geos\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba 
conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='shapely', repo='shapely', sha='ff2ceac81cca6240c459eba5a5ce07084fe25ad2', commit_date=1662401853.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sourmash-bio', repo='sourmash', sha='9230fce7479c547c96dabe0c1a749a71a4b9e77c', commit_date=1650894889.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sourmash-bio', repo='sourmash', sha='d2d638b645048cc93377fb9aff8a3be8c937b8b3', commit_date=1613310154.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install 
git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" conda install -y -c conda-forge rust\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='spotify', repo='voyager', sha='49416c5db539a40adba2588bfe19dc8736db01b2', commit_date=1734118555.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n elif [[ -f \"${ROOT_PATH}/setup.py\" 
]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='spotify', repo='voyager', sha='88cfc468617fde8360ac6db7e71bc578ba49ed16', commit_date=1725990271.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml or setup.py if available\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation \"${ROOT_PATH}\"\n elif [[ -f \"${ROOT_PATH}/setup.py\" ]]; then\n 
micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable \"${ROOT_PATH}\"\n else\n echo \"ERROR: No 'setup.py' or 'pyproject.toml' found in ${ROOT_PATH}.\"\n exit 1\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sunpy', repo='sunpy', sha='01ea7b5e2760c24e08386f95fd5fd1c0f73da47f', commit_date=1739035442.0)": { - "building_data": 
"#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git 
build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='sunpy', repo='sunpy', sha='770f95dbfb033ffacc7172a3cff5158b09f7efe4', commit_date=1651836877.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython extension-helpers\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv 
pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='textualize', repo='rich', sha='1de94713811101702b8fcf283c64d1a5de5a8213', commit_date=1657547667.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --use-pep517 --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf 
mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='textualize', repo='rich', sha='cb92947610614e04116f82cb001ed44dda1699fb', commit_date=1647342081.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba 
install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='tqdm', repo='tqdm', sha='0f823e79f303b4a93ef1381badb1e65757e5070f', commit_date=1603641812.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda 
libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='tqdm', repo='tqdm', sha='42761473f9edf276937cc3a28a6fcabc59f5f97d', commit_date=1575632008.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . 
-type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - 
"entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes 
--config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='xorbitsai', repo='xorbits', sha='aee883be1dcd4cbbd43d67794932d5c858fcffe2', commit_date=1676955703.0)": { - "building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n 
micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install the project in editable mode, fallback to non-editable if it fails\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} || micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - }, - "Task(owner='xorbitsai', repo='xorbits', sha='ebc391fe0fa55599c3197c52408bd43a4bd9476f', commit_date=1695401335.0)": { - 
"building_data": "#!/usr/bin/env bash\ncd_asv_json_dir() {\n local match\n match=$(find . -type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\neval \"$(micromamba shell hook --shell=bash)\"\nmicromamba activate base\n\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\nfor version in $python_versions; do\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba create -y -n \"asv_${version}\" -c conda-forge python=\"$version\" git conda mamba \"libmambapy<=1.9.9\" numpy scipy cython joblib threadpoolctl pytest compilers\n micromamba run -n \"asv_${version}\" pip install git+https://github.com/airspeed-velocity/asv\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME\n micromamba run -n \"asv_${version}\" pip install meson-python cython\n # Attempt to install using pyproject.toml if setup.py is not found\n if [[ -f \"${ROOT_PATH}/pyproject.toml\" ]]; then\n micromamba run -n \"asv_${version}\" pip install --verbose --no-build-isolation ${ROOT_PATH}\n else\n echo \"Neither 'setup.py' nor 
'pyproject.toml' found in ${ROOT_PATH}. Cannot install the project.\"\n fi\ndone", - "dockerfile_data": "FROM buildpack-deps:jammy\n\nARG REPO_URL\nARG COMMIT_SHA\nRUN apt-get update && \\\n apt-get install -y --no-install-recommends \\\n curl git build-essential jq cmake ninja-build && \\\n rm -rf /var/lib/apt/lists/*\n\nRUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest \\\n | tar -xvj -C /usr/local/bin --strip-components=1 bin/micromamba\n\nENV MAMBA_ROOT_PREFIX=/opt/conda \\\n PATH=/opt/conda/bin:$PATH \\\n MAMBA_DOCKERFILE_ACTIVATE=1 \\\n OPENBLAS_NUM_THREADS=1 \\\n MKL_NUM_THREADS=1 \\\n OMP_NUM_THREADS=1\n\nRUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \\\n python=3.10 \\\n git asv pyperf mamba conda libmambapy jq && \\\n micromamba clean --all --yes\n\nRUN mkdir -p /workspace /output\nWORKDIR /workspace\n\nCOPY entrypoint.sh /entrypoint.sh\nRUN chmod +x /entrypoint.sh\n\nRUN git clone ${REPO_URL} /workspace/repo\nWORKDIR /workspace/repo\nRUN git checkout ${COMMIT_SHA}\n\nCOPY docker_build.sh /workspace/repo/docker_build.sh\nRUN chmod +x /workspace/repo/docker_build.sh\nRUN /workspace/repo/docker_build.sh\n\nENTRYPOINT [\"/entrypoint.sh\"]\n", - "entrypoint_data": "#!/usr/bin/env bash\n# set -euo pipefail\nset -x\n: \"${ASV_ARGS:?Need to set ASV_ARGS}\"\n: \"${ASV_MACHINE:=?Need to set ASV_MACHINE}\"\n: \"${ASV_OS:=?Need to set ASV_OS}\"\n: \"${ASV_NUM_CPU:=?Need to set ASV_NUM_CPU}\"\n: \"${ASV_ARCH:=?Need to set ASV_ARCH}\"\n: \"${ASV_CPU:=?Need to set ASV_CPU}\"\n: \"${ASV_RAM:=?Need to set ASV_RAM}\"\n\n\ncd_asv_json_dir() {\n local match\n match=$(find . 
-type f -name \"asv.*.json\" | head -n 1)\n\n if [[ -n \"$match\" ]]; then\n local dir\n dir=$(dirname \"$match\")\n cd \"$dir\" || echo \"Failed to change directory to $dir\"\n else\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n fi\n}\n\neval \"$(micromamba shell hook --shell=bash)\"\n\nmicromamba activate base\nROOT_PATH=${PWD}\ncd_asv_json_dir || exit 1\n\n# the conf name is one of \"asv.conf.json\" or \"asv.ci.conf.json\" or \"asv.*.json\"\nCONF_NAME=$(basename \"$(find . -type f -name \"asv.*.json\" | head -n 1)\")\nif [[ -z \"$CONF_NAME\" ]]; then\n echo \"No 'asv.*.json' file found in current directory or subdirectories.\"\n exit 1\nfi\n\n# Read the python versions from the asv.conf.json\npython_versions=$(python -c \"import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))\")\n# change the \"results_dir\" in asv.conf.json to \"/output/{COMMIT_SHA}/\"\nfor version in $python_versions; do\n # Create per\u2011Python env and install ASV\n python -c \"import asv, os, pathlib\npath = pathlib.Path('/output/'\\\"$COMMIT_SHA\\\"'/''\\\"$version\\\"')\npath.mkdir(parents=True, exist_ok=True)\n\nconfig = asv.config.Config.load('$CONF_NAME')\nconfig.results_dir = str(path / 'results')\nconfig.html_dir = str(path / 'html')\nconfig.branches = ['HEAD']\n\nasv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version)\nasv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version)\n\"\n micromamba run -n \"asv_${version}\" asv machine --yes --config $CONF_NAME --machine \"$ASV_MACHINE\" --os \"$ASV_OS\" --num_cpu \"$ASV_NUM_CPU\" --arch \"$ASV_ARCH\" --cpu \"$ASV_CPU\" --ram \"$ASV_RAM\"\n micromamba run -n \"asv_${version}\" asv run --show-stderr ${ASV_ARGS} --config $CONF_NAME\ndone\n\necho \"Benchmarks complete.\"\n" - } - }, - "version": 1 -} diff --git a/scratch/scripts/collect_and_filter_commits.py b/scratch/scripts/collect_and_filter_commits.py new 
file mode 100644 index 0000000..3deaf7d --- /dev/null +++ b/scratch/scripts/collect_and_filter_commits.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +import argparse +import re +import tempfile +from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, as_completed +from pathlib import Path +from typing import Any + +import pandas as pd +from git import Repo +from tqdm.auto import tqdm + +from datasmith.execution.collect_commits_offline import collect_commits +from datasmith.execution.utils import _get_commit_info_offline, find_file_in_tree +from datasmith.logging_config import configure_logging + +# Configure logging for the script +logger = configure_logging() + + +def parse_args() -> argparse.Namespace: + p = argparse.ArgumentParser(description="Filter commits for ASV benchmarks (fast version).") + + p.add_argument("--filtered-benchmarks-pth", required=True, help="Path to the filtered benchmarks CSV file.") + p.add_argument("--output-pth", required=True, help="Path to save the filtered commits CSV file.") + p.add_argument( + "--max-repos", type=int, default=150, help="Maximum number of repositories (sorted by stars) to consider." + ) + p.add_argument("--procs", type=int, default=1, help="Number of processes for fetching commit metadata (CPU-bound).") + + # Optional knobs. 
keep defaults sensible + p.add_argument("--threads", type=int, default=16, help="Worker threads for finding asv.conf.json (I/O-bound).") + return p.parse_args() + + +def _asv_conf_worker(repo_name: str) -> list[str] | None: + """Locate asv.conf.json inside a repo (wrapper for ThreadPool).""" + return find_file_in_tree(repo_name, "asv.conf.json") + + +def _commit_info_worker(arg_tuple: tuple[Repo, str]) -> dict[str, Any] | None: + """Wrapper for ProcessPool: arg_tuple = (repo_name, sha).""" + repo, sha = arg_tuple + # return _get_commit_info(repo, sha) + return _get_commit_info_offline(repo, sha) + + +NON_CORE_PATTERNS = re.compile( + r"""( + (^|/)tests?(/|$) | # any tests/ directory + (^|/)doc[s]?(/|$) | # docs/, doc/, documentation/ + (^|/)examples?(/|$) | # examples/ + (^|/)\.github(/|$) | # GitHub meta files + (^|/)benchmarks?(/|$) | # benchmarks/ + (^|/)dist-info(/|$) | # wheel metadata + (^|/)build(/|$) | # build artifacts + (^|/)site-packages(/|$) | # vendored wheels + (^|/)__(init|pycache)__ | # __init__.py, __pycache__ + (^|/)requirements-docs\.txt$| + (^|/)pyproject\.toml$| + (^|/)README\.md$ | + \.rst$ | # reStructuredText docs + \.md$ # markdown docs + )""", + re.VERBOSE, +) + + +def has_core_file(files_changed: str) -> bool: + """ + Return True if *any* path in the newline-separated `files_changed` + string is judged to be a *core* file under the rules above. + """ + for path in files_changed.split("\n"): + path = path.strip() + # Empty lines can show up if a commit touches a single file + if not path: + continue + if not NON_CORE_PATTERNS.search(path): + # As soon as we find one path that is NOT caught by the + # non-core pattern, we know the commit touched "core" code. 
+ return True + return False + + +def main() -> None: + args = parse_args() + + benchmarks = pd.read_csv(args.filtered_benchmarks_pth) + + benchmarks = benchmarks.sort_values("stars", ascending=False, ignore_index=True).head(args.max_repos) + + with ThreadPoolExecutor(max_workers=args.threads) as tp: + benchmarks["asv_conf_path"] = list( + tqdm(tp.map(_asv_conf_worker, benchmarks["repo_name"]), total=len(benchmarks), desc="Scanning repos") + ) + + benchmarks = benchmarks.dropna(subset=["asv_conf_path"]) + + if benchmarks.empty: + # Nothing to do. create empty output to keep downstream happy. + Path(args.output_pth).write_text("", encoding="utf-8") + logger.warning("No repositories with asv.conf.json found. Exiting.") + return + + # with open(args.merged_commits_pth, encoding="utf-8") as f: + # commits = pd.DataFrame([json.loads(line.strip().replace("'", '"').replace("None", "null")) for line in f]) + + # commits = commits.merge(benchmarks, how="right", on="repo_name") + # commits = commits.dropna(subset=["commit_sha"]) + + # all_repo_names = set(commits["repo_name"]) + all_repo_names = set(benchmarks["repo_name"]) + + # download all repos to a temp dir + with tempfile.TemporaryDirectory(prefix="gh-repos-") as td: + + def clone_repo(repo_name: str) -> tuple[str, Repo]: + repo_name = repo_name.strip("/") + owner, name = repo_name.split("/", 1) + path = Path(td) / f"{owner}__{name}.git" + repo = Repo.clone_from( + f"https://github.com/{repo_name}.git", + path, + quiet=True, + allow_unsafe_options=True, + allow_unsafe_protocols=True, + ) + logger.debug("Cloned repo %s to %s", repo_name, path) + return repo_name, repo + + all_repos = {} + commit_info_args: list[tuple[Repo, str]] = [] + with ThreadPoolExecutor(max_workers=args.threads) as tp: + futures = {tp.submit(clone_repo, repo_name): repo_name for repo_name in all_repo_names} + for f in tqdm(as_completed(futures), total=len(futures), desc="Cloning repos"): + repo_name, repo = f.result() + all_repos[repo_name] = repo 
+ commit_shas = collect_commits(repo) + for commit_sha in commit_shas: + commit_info_args.append((repo, commit_sha)) + + with ProcessPoolExecutor(max_workers=args.procs) as pp: + commit_info = list( + tqdm( + pp.map(_commit_info_worker, commit_info_args), + total=len(commit_info_args), + desc="Fetching commit metadata", + ) + ) + + commits_meta = pd.json_normalize(commit_info) # pyright: ignore[reportArgumentType] + commits_meta = commits_meta[commits_meta["has_asv"]] # Take out all commits that don't have asv installed. + # import IPython; IPython.embed() + + commits_merged = commits_meta[commits_meta["files_changed"].apply(has_core_file)].reset_index(drop=True) + + for k, repo in all_repos.items(): + repo.close() + logger.debug("Closed repo %s", k) + + out_path = Path(args.output_pth) + if not out_path.parent.exists(): + out_path.parent.mkdir(parents=True, exist_ok=True) + # commits.to_csv(out_path, index=False) + # commits_merged.to_json(out_path, orient="records", lines=True, index=False) + # save as a parquet file + commits_merged.to_parquet(out_path, index=False) + + logger.info("✔ Wrote %s rows → %s", len(commits_merged), out_path) + + +if __name__ == "__main__": + main() diff --git a/scratch/scripts/collect_commits.py b/scratch/scripts/collect_commits.py index 99d6259..75fa5a2 100644 --- a/scratch/scripts/collect_commits.py +++ b/scratch/scripts/collect_commits.py @@ -19,9 +19,9 @@ def parse_args() -> argparse.Namespace: formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) p.add_argument( - "--dashboards", + "--repos", required=True, - help="Location of the dashboards csv that has a column `url` with GitHub repository URLs", + help="Location of the repos csv that has a column `url` with GitHub repository URLs", ) p.add_argument( "--outfile", @@ -41,9 +41,9 @@ def parse_args() -> argparse.Namespace: if __name__ == "__main__": args = parse_args() - dashboards = pd.read_csv(args.dashboards) - urls = dashboards["url"] - repo_names = dashboards["repo_name"] 
+ repos = pd.read_csv(args.repos) + urls = repos["url"] + repo_names = repos["repo_name"] idx = 0 all_commits = [] @@ -56,7 +56,7 @@ def parse_args() -> argparse.Namespace: per_page=args.per_page, ) tagged_commits = find_tagged_releases(repo_name=repo_name) - # parent_commits = find_parent_commits(repo_name=repo_name, commits=perf_commits + tagged_commits) + # parent_commits = find_parent_releases(repo_name=repo_name, commits=perf_commits + tagged_commits) commits = list(set(perf_commits + tagged_commits)) for i, commit in enumerate(commits, 1): commit_id = f"{repo_name}_{i}" diff --git a/scratch/scripts/collect_perf_commits.py b/scratch/scripts/collect_perf_commits.py new file mode 100644 index 0000000..f9d0ec5 --- /dev/null +++ b/scratch/scripts/collect_perf_commits.py @@ -0,0 +1,51 @@ +import argparse +from pathlib import Path + +import pandas as pd + +from datasmith.agents.config import configure_agent_backends +from datasmith.agents.perf_judge import PerfClassifier +from datasmith.execution.collect_commits_offline import batch_classify_commits +from datasmith.logging_config import configure_logging + +configure_agent_backends(local=True) + +# logger = configure_logging(level=10, stream=open(__file__ + ".log", "a")) +logger = configure_logging() + + +def parse_args() -> argparse.Namespace: + p = argparse.ArgumentParser( + description="Collect perf-related commits", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + p.add_argument("--commits", type=Path, required=True, help="Path to a JSONL file containing commit information.") + p.add_argument("--outfile", type=Path, required=True, help="Path to save the filtered commits JSONL file.") + p.add_argument("--max-workers", type=int, default=-1, help="Number of parallel workers. 
-1 = sequential.") + return p.parse_args() + + +def main(args: argparse.Namespace) -> None: + if args.commits.suffix == ".parquet": + df = pd.read_parquet(args.commits).sort_values("stars", ascending=False) + else: + df = pd.read_json(args.commits, lines=True).sort_values("stars", ascending=False) + filtered_df = df.copy(deep=True) + perf_classifier = PerfClassifier() + all_shas = set() + for repo_name, group in df.groupby("repo_name"): + assert isinstance(repo_name, str), f"Unexpected repo_name type: {type(repo_name)}" # noqa: S101 + logger.info(f"Processing {repo_name} with {len(group)} commits.") + commits = [(row["sha"], row["message"], row.get("file_change_summary", "")) for _, row in group.iterrows()] + merge_shas = batch_classify_commits(perf_classifier, repo_name, commits, args.max_workers) + logger.info(f"Found {len(merge_shas)} perf-related commits in {repo_name}.") + all_shas.update(merge_shas) + + filtered_df = filtered_df[filtered_df["sha"].isin(all_shas)].reset_index(drop=True) + logger.info(f"Filtered down to {len(filtered_df)} commits from {len(df)} total commits.") + filtered_df.to_json(args.outfile, lines=True, orient="records") + + +if __name__ == "__main__": + args = parse_args() + main(args) diff --git a/scratch/scripts/initialize_context_registry.py b/scratch/scripts/initialize_context_registry.py index 63d1e54..5542a77 100644 --- a/scratch/scripts/initialize_context_registry.py +++ b/scratch/scripts/initialize_context_registry.py @@ -1,7 +1,5 @@ from __future__ import annotations -from pathlib import Path - from datasmith.docker.context import ContextRegistry, DockerContext from datasmith.logging_config import get_logger @@ -10,400 +8,780 @@ CONTEXT_REGISTRY = ContextRegistry(default_context=DockerContext()) CONTEXT_REGISTRY.register( - "asv/astropy/astropy", + "astropy/astropy:pkg", DockerContext( building_data="""#!/usr/bin/env bash +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. 
+set -euo pipefail -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base -ROOT_PATH=${PWD} -git clone -b main https://github.com/astropy/astropy-benchmarks.git --single-branch -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." - exit 1 + +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." 
>&2 + exit 1 fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - #### BUILD STEPS GO HERE. #### - export CFLAGS="$CFLAGS -Wno-error=incompatible-pointer-types" - micromamba run -n "asv_${version}" pip install -e . scipy matplotlib - #### BUILD STEPS END HERE. #### +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). +# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. 
+# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" + fi + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" + + export CFLAGS="${CFLAGS:-} -Wno-error=incompatible-pointer-types" + micromamba run -n "$ENV_NAME" pip install -e . scipy matplotlib + + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv/scikit-learn/scikit-learn", + "scikit-learn/scikit-learn:pkg", DockerContext( building_data="""#!/usr/bin/env bash +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. +set -euo pipefail -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -apt-get update && \ - apt-get install -y \ - ninja-build \ - cmake - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 + +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." >&2 + exit 1 fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" pip install meson-python cython - #### BUILD STEPS GO HERE. #### - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} - #### BUILD STEPS END HERE. #### +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). 
+# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" + fi + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "$ENV_NAME" pip install meson-python cython + + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv/scikit-learn/scikit-learn/8bc36080d9855d29e1fcbc86da46a9e89e86c046", + "scikit-learn/scikit-learn/8bc36080d9855d29e1fcbc86da46a9e89e86c046:pkg", DockerContext( building_data="""#!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. +set -euo pipefail + +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 + +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." >&2 + exit 1 fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - #### BUILD STEPS GO HERE. #### - micromamba run -n "asv_${version}" pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" - export CFLAGS="$CFLAGS -Wno-error=incompatible-pointer-types" - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} - #### BUILD STEPS END HERE. #### +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. 
+# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). +# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" + fi + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "$ENV_NAME" pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" + export CFLAGS="${CFLAGS:-} -Wno-error=incompatible-pointer-types" + + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv/nvidia/warp", + "nvidia/warp:pkg", DockerContext( - building_data=""" -#!/usr/bin/env bash - -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) + building_data="""#!/usr/bin/env bash +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. +set -euo pipefail - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." 
>&2 + exit 1 +fi +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). +# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" # only run the below if condition if bvh.cpp is present grep -q '^#include ' "${ROOT_PATH}/warp/native/bvh.cpp" || \ sed -i 's|#include |#include \n#include |' "${ROOT_PATH}/warp/native/bvh.cpp" -CONF_NAME=$(basename "$(find . 
-type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." - exit 1 -fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" pip install meson-python cython - #### BUILD STEPS GO HERE. #### - micromamba run -n "asv_${version}" python "${ROOT_PATH}/build_lib.py" - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} - #### BUILD STEPS END HERE. #### + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. 
Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" + fi + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + micromamba run -n "$ENV_NAME" pip install meson-python cython + export CFLAGS="${CFLAGS:-} -Wno-error=incompatible-pointer-types" + micromamba run -n "$ENV_NAME" python "${ROOT_PATH}/build_lib.py" + + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv/python-control/python-control", + "python-control/python-control:pkg", DockerContext( building_data=""" #!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. +set -euo pipefail + +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 + +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." >&2 + exit 1 fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" pip install meson-python cython - #### BUILD STEPS GO HERE. #### - # if make_version exists run it - if [[ -f "${ROOT_PATH}/make_version.py" ]]; then - micromamba run -n "asv_${version}" python "${ROOT_PATH}/make_version.py" +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). 
+# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" fi - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} - #### BUILD STEPS END HERE. 
#### + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers + if [[ -f "${ROOT_PATH}/make_version.py" ]]; then + micromamba run -n "$ENV_NAME" python "${ROOT_PATH}/make_version.py" + fi + + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) CONTEXT_REGISTRY.register( - "asv/mdanalysis/mdanalysis", + "mdanalysis/mdanalysis:pkg", DockerContext( building_data=""" #!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} +# Purpose: Build/install the repo (editable) in one or more ASV micromamba envs, then run health checks. +set -euo pipefail + +###### SETUP CODE (NOT TO BE MODIFIED) ###### +# Loads micromamba, common helpers, and persisted variables from the env stage. +source /etc/profile.d/asv_utils.sh || true +source /etc/profile.d/asv_build_vars.sh || true eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 + +ROOT_PATH=${ROOT_PATH:-$PWD} # Usually /workspace/repo +REPO_ROOT="$ROOT_PATH" +TARGET_VERSIONS="${PY_VERSION:-${ASV_PY_VERSIONS:-}}" +if [[ -z "${TARGET_VERSIONS}" ]]; then + echo "Error: No PY_VERSION set and ASV_PY_VERSIONS not found." >&2 + exit 1 fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy "cython<3" joblib threadpoolctl pytest compilers meson-python - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - #### BUILD STEPS GO HERE. #### - # if maintainer/install_all.sh exists run it with develop - if [[ -f "maintainer/install_all.sh" ]]; then - micromamba activate "asv_${version}" - working_dir=$(pwd) - cd "$ROOT_PATH" || exit 1 - bash maintainer/install_all.sh develop - cd "$working_dir" || exit 1 - else - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable . +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. 
Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). +# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. + +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(asv_detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" fi - #### BUILD STEPS END HERE. 
#### + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some basic micromamba packages. + + micromamba install -y -n "$ENV_NAME" -c conda-forge git conda mamba "libmambapy<=1.9.9" numpy scipy "cython<3" joblib threadpoolctl pytest compilers meson-python + # if maintainer/install_all.sh exists run it with develop + if [[ -f "maintainer/install_all.sh" ]]; then + micromamba activate "$ENV_NAME" + working_dir=$(pwd) + cd "$ROOT_PATH" || exit 1 + bash maintainer/install_all.sh develop + cd "$working_dir" || exit 1 + else + # Editable install (no build isolation preferrably). Toolchain lives in the env already. 
+ log "Editable install with --no-build-isolation" + PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT" + fi + + + # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1) + log "Running smoke checks" + micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name \"$IMP\" --repo-root \"$REPO_ROOT\" ${RUN_PYTEST_SMOKE:+--pytest-smoke} + + # Machine-readable markers (useful in logs) + echo "::import_name=${IMP}::env=${ENV_NAME}" done + +log "All builds complete ✅" """.strip(), - dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, - entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, ), ) -# CONTEXT_REGISTRY.register( -# "asv/default/nobuild", -# DockerContext( -# building_data="""#!/usr/bin/env bash -# cd_asv_json_dir() { -# local match -# match=$(find . -type f -name "asv.*.json" | head -n 1) - -# if [[ -n "$match" ]]; then -# local dir -# dir=$(dirname "$match") -# cd "$dir" || echo "Failed to change directory to $dir" -# else -# echo "No 'asv.*.json' file found in current directory or subdirectories." -# fi -# } -# eval "$(micromamba shell hook --shell=bash)" -# micromamba activate base - -# ROOT_PATH=${PWD} -# cd_asv_json_dir || exit 1 -# CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -# if [[ -z "$CONF_NAME" ]]; then -# echo "No 'asv.*.json' file found in current directory or subdirectories." 
-# exit 1 -# fi -# python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -# for version in $python_versions; do -# python -c "import asv, os, pathlib -# path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -# path.mkdir(parents=True, exist_ok=True) - -# config = asv.config.Config.load('$CONF_NAME') -# config.results_dir = str(path / 'results') -# config.html_dir = str(path / 'html') - -# asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -# asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -# " -# micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers -# micromamba activate "asv_${version}" -# pip install git+https://github.com/airspeed-velocity/asv -# pip install -U meson-python "cython<3" "numpy<2" "setuptools==60" "scipy<1.14" -# # BUILD STEPS GO HERE. -# done -# """.strip(), -# dockerfile_data=CONTEXT_REGISTRY["asv/default/default"].dockerfile_data, -# entrypoint_data=CONTEXT_REGISTRY["asv/default/default"].entrypoint_data, -# ), -# ) - -CONTEXT_REGISTRY.save_to_file(Path("scratch/context_registry_init.json")) + +# if __name__ == "__main__": +# CONTEXT_REGISTRY.save_to_file(Path("scratch/context_registry_init.json")) +# # for each context, build an image with the context. 
+# import docker +# from datasmith.docker.context import DockerContext, Task, ContextRegistry +# client = docker.from_env() + +# import concurrent.futures + +# fails = dict() + +# def build_context(task_context): +# task, context = task_context +# if not task.sha: +# # use latest commit on default branch +# import requests +# resp = requests.get(f"https://api.github.com/repos/{task.owner}/{task.repo}") +# resp.raise_for_status() +# commit_sha = resp.json().get("default_branch", "main") +# task = Task(owner=task.owner, repo=task.repo, sha=commit_sha, tag=task.tag) +# print(f"Building image for {task.get_image_name()} at {task.sha}") +# res = context.build_container_streaming( +# client=client, +# image_name=task.get_image_name(), +# build_args={ +# "REPO_URL": f"https://www.github.com/{task.owner}/{task.repo}", +# "COMMIT_SHA": task.sha, # pyright: ignore[reportArgumentType] +# }, +# force=True, +# timeout_s=300, +# pull=True, +# ) +# return (task, res) + +# with concurrent.futures.ProcessPoolExecutor() as executor: +# futures = {executor.submit(build_context, item): item[0] for item in CONTEXT_REGISTRY.registry.items()} +# for future in concurrent.futures.as_completed(futures): +# task = futures[future] +# try: +# task, res = future.result() +# if res.ok: +# print(f"Built image {task.get_image_name()} successfully") +# else: +# print(f"Failed to build image {task.get_image_name()}") +# fails[task] = res +# except Exception as exc: +# print(f"Exception building image {task.get_image_name()}: {exc}") +# fails[task] = exc + +# if fails: +# import IPython; IPython.embed() diff --git a/scratch/scripts/synthesize_contexts.py b/scratch/scripts/synthesize_contexts.py index e46b2e9..99fd0c6 100644 --- a/scratch/scripts/synthesize_contexts.py +++ b/scratch/scripts/synthesize_contexts.py @@ -60,6 +60,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--context-registry", type=Path, + required=True, help="Path to the context registry JSON file.", ) return 
parser.parse_args() @@ -115,7 +116,13 @@ def prepare_tasks( def main(args: argparse.Namespace) -> None: client = get_docker_client() all_states = process_inputs(args) - context_registry = ContextRegistry.load_from_file(path=args.context_registry) + if not args.context_registry.exists(): + logger.warning("main: context registry file %s does not exist; starting fresh", args.context_registry) + context_registry = ( + ContextRegistry.load_from_file(path=args.context_registry) + if args.context_registry.exists() + else ContextRegistry() + ) # Prepare tasks tasks = prepare_tasks(all_states, args.limit_per_repo, context_registry) diff --git a/src/datasmith/__init__.py b/src/datasmith/__init__.py index 707e7fd..368d658 100644 --- a/src/datasmith/__init__.py +++ b/src/datasmith/__init__.py @@ -2,7 +2,6 @@ import dotenv -from datasmith.agents.config import configure_agent_backends from datasmith.logging_config import configure_logging # Configure logging with the centralized configuration @@ -16,8 +15,5 @@ def setup_environment() -> None: else: logger.warning("No tokens.env file found. 
Skipping environment variable setup.") - # Initialize agent backends - configure_agent_backends() - setup_environment() diff --git a/src/datasmith/agents/config.py b/src/datasmith/agents/config.py index 4bed37d..093349c 100644 --- a/src/datasmith/agents/config.py +++ b/src/datasmith/agents/config.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -def configure_agent_backends() -> None: +def configure_agent_backends(local: bool = False) -> None: model = os.getenv("DSPY_MODEL_NAME") backend_url = os.getenv("DSPY_URL") kwargs: dict[str, str | dict[str, str]] = {"model_type": "chat"} @@ -35,5 +35,14 @@ def configure_agent_backends() -> None: logger.warning("Environment variables for DSPY model or API key are not set.") return - lm = dspy.LM(model=model, api_base=backend_url, api_key=api_key, **kwargs) # pyright: ignore[reportArgumentType] + lm = get_local_lm() if local else dspy.LM(model=model, api_base=backend_url, api_key=api_key, **kwargs) # pyright: ignore[reportArgumentType] dspy.configure(lm=lm) + + +def get_local_lm() -> dspy.LM: + if (model := os.getenv("DSPY_MODEL_NAME", None)) and (backend_url := os.getenv("DSPY_URL", None)): + api_key = os.getenv("DSPY_API_KEY", None) + return dspy.LM(model=model, api_base=backend_url, api_key=api_key, model_type="chat") + raise NotImplementedError( + "Local LM is not configured. Please set DSPY_MODEL_NAME and DSPY_URL environment variables." + ) diff --git a/src/datasmith/agents/container_toolbox.py b/src/datasmith/agents/container_toolbox.py index d9bd644..8341977 100644 --- a/src/datasmith/agents/container_toolbox.py +++ b/src/datasmith/agents/container_toolbox.py @@ -348,24 +348,13 @@ def try_import(self, cmd_python: str, candidates: list[str]) -> dict: if line.startswith("IMPORTED::"): succeeded = line.split("::", 2)[1] break + stdout_snip = (res.stdout[:1000] + "..." + res.stdout[-1000:]) if len(res.stdout) > 2000 else res.stdout + stderr_snip = (res.stderr[:1000] + "..." 
+ res.stderr[-1000:]) if len(res.stderr) > 2000 else res.stderr return { "ok": ok, "tried": candidates, "succeeded": succeeded, - "stdout": res.stdout[-2000:], - "stderr": res.stderr[-2000:], + "stdout": stdout_snip, + "stderr": stderr_snip, "rc": 0 if ok else 1, } - - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - client = docker.from_env() - img_name = "asvprobe/textualize/rich/1de94713811101702b8fcf283c64d1a5de5a8213" - pc = PersistentContainer( - client, img_name, name=img_name.replace("/", "-").replace(":", "-"), workdir="/workspace/repo" - ) - import IPython - - IPython.embed() - pc.stop() diff --git a/src/datasmith/agents/context_synthesis.py b/src/datasmith/agents/context_synthesis.py index 9508b07..61a9be5 100644 --- a/src/datasmith/agents/context_synthesis.py +++ b/src/datasmith/agents/context_synthesis.py @@ -34,47 +34,57 @@ def _ts_to_iso(ts: float | int | None) -> str: return str(ts) -class BuildScriptSynthesis(dspy.Signature): - """ - Draft a bash script (docker_build.sh) to build & install a Python repo inside micromamba envs - discovered via asv.*.json. The script MUST be idempotent and safe to run in Docker. - Respect this template: - - discover and cd into the dir containing asv.*.json - - for each python version listed there: - * create micromamba env "asv_${version}" - * ensure asv + build tooling - * then perform project install (editable or wheel) with best-guess flags - - no user prompts, all non-interactive - - Do not surround with ```bash ... ```. Return raw bash script. - """ - - # Inputs - owner_repo = dspy.InputField(desc="The repository this commit belongs to. E.g. 'scikit-learn/scikit-learn'.") - sha = dspy.InputField(desc="The commit SHA that is currently checked out.") - commit_date = dspy.InputField(desc="The commit date in ISO format, e.g. '2023-10-05T12:34:56Z'.") - stderr_logs = dspy.InputField( - desc="The most recent stderr logs from the last build attempt. Upto ~8k tail-end chars." 
- ) - stdout_logs = dspy.InputField( - desc="The most recent stdout logs from the last build attempt. Upto ~8k tail-end chars." - ) - failure_more = dspy.InputField( - desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." - ) - last_docker_build_script = dspy.InputField(desc="Previous docker_build.sh script.") - expected_template = dspy.InputField(desc="Stable outer template..") - - # Output - error_summary = dspy.OutputField(desc="A brief summary of the last build failure, and possible causes.") - resolution_steps = dspy.OutputField(desc="Concrete steps to resolve the failure.") - docker_build_script = dspy.OutputField( - desc="Final executable bash script that successfully builds the project from source." - ) +# class BuildScriptSynthesis(dspy.Signature): +# """ +# Draft a bash script (docker_build.sh) to build & install a Python repo inside micromamba envs +# discovered via asv.*.json. The script MUST be idempotent and safe to run in Docker. +# Respect this template: +# - discover and cd into the dir containing asv.*.json +# - for each python version listed there: +# * create micromamba env "asv_${version}" +# * ensure asv + build tooling +# * then perform project install (editable or wheel) with best-guess flags +# - no user prompts, all non-interactive +# - Do not surround with ```bash ... ```. Return raw bash script. +# """ + +# # Inputs +# owner_repo = dspy.InputField(desc="The repository this commit belongs to. E.g. 'scikit-learn/scikit-learn'.") +# sha = dspy.InputField(desc="The commit SHA that is currently checked out.") +# commit_date = dspy.InputField(desc="The commit date in ISO format, e.g. '2023-10-05T12:34:56Z'.") +# stderr_logs = dspy.InputField( +# desc="The most recent stderr logs from the last build attempt. Upto ~8k tail-end chars." +# ) +# stdout_logs = dspy.InputField( +# desc="The most recent stdout logs from the last build attempt. Upto ~8k tail-end chars." 
+# ) +# failure_more = dspy.InputField( +# desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." +# ) +# last_docker_build_script = dspy.InputField(desc="Previous docker_build.sh script.") +# initial_template = dspy.InputField(desc="Stable outer template..") + +# # Output +# error_summary = dspy.OutputField(desc="A brief summary of the last build failure, and possible causes.") +# resolution_steps = dspy.OutputField(desc="Concrete steps to resolve the failure.") +# docker_build_script = dspy.OutputField( +# desc="Final executable bash script that successfully builds the project from source." +# ) +# Draft a bash script (docker_build.sh) to build & install a Python repo inside micromamba envs +# discovered via asv.*.json. The script MUST be idempotent and safe to run in Docker. +# Respect this template: +# - discover and cd into the dir containing asv.*.json +# - for each python version listed there: +# * create micromamba env "asv_${version}" +# * ensure asv + build tooling +# * then perform project install (editable or wheel) with best-guess flags +# - no user prompts, all non-interactive +# - Do not surround with ```bash ... ```. Return raw bash script. class BuildScriptAgentStep(dspy.Signature): """ - An interactive planner for producing docker_build.sh. It can either: + An interactive planner for producing a bash script (docker_build.sh) to build and install a Python repo inside micromamba envs. It can either: (A) request a TOOL call (probe_repo, list_tree, read_file, try_import) with JSON args, or (B) output the final script. If you need a tool, set next_action to one of: 'probe_repo' | 'list_tree' | 'read_file' | 'try_import' | 'none'. @@ -82,6 +92,11 @@ class BuildScriptAgentStep(dspy.Signature): For list_tree, provide JSON like {"depth": 2}. For try_import, provide JSON like {"candidates": ["foo", "bar"]}. Return docker_build_script ONLY when you're satisfied. 
+ + Respect these constraints: + - The script MUST be idempotent and safe to run in Docker. + - No user prompts, all non-interactive. + - Do not surround with Markdown tags like ```bash ... ```. """ # Inputs (context) @@ -98,8 +113,10 @@ class BuildScriptAgentStep(dspy.Signature): desc="Describes where the failure occured. E.g. 'N/A', 'build failed', 'asv run failed'." ) last_docker_build_script = dspy.InputField(desc="Previous docker_build.sh script.") - expected_template = dspy.InputField(desc="Stable outer template..") - repo_facts_json = dspy.InputField(desc="JSON of inferred repo facts (paths, candidates, versions).") + initial_template = dspy.InputField( + desc="Initial template of the docker_build.sh script with important instructions." + ) + repo_facts_json = dspy.InputField(desc="Some inferred repo facts (A JSON object with paths, candidates, versions).") toolbelt = dspy.InputField(desc="Human-readable summary of available tools.") messages_log = dspy.InputField(desc="Transcript of prior tool actions & observations.") @@ -127,6 +144,8 @@ def _toolbelt_text(self) -> str: "- list_tree(depth=2): show a trimmed top-level tree for orientation.\n" "- read_file(path, max_bytes=65536): read a file at this commit.\n" "- try_import(candidates=[...]): (post-build) quick python import check inside the built image.\n" + "- exec_arbitrary(command): run arbitrary shell command in the checked-out repo (careful!).\n" + "- none/finish + docker_build_script: when you are satisfied, return the final build script in the docker_build_script field.\n" ) def forward( @@ -138,7 +157,7 @@ def forward( stdout_logs: str, failure_more: str, last_docker_build_script: str, - expected_template: str, + initial_template: str, repo_facts_json: str, tool_executor: ContainerToolExecutor, max_steps: int = 4, @@ -164,7 +183,7 @@ def forward( stdout_logs=stdout_logs or "", failure_more=failure_more or "N/A", last_docker_build_script=last_docker_build_script or "", - 
expected_template=expected_template, + initial_template=initial_template, repo_facts_json=repo_facts_json or "{}", toolbelt=toolbelt, messages_log=messages_log, @@ -200,19 +219,20 @@ def forward( # stdout_logs=stdout_logs or "", # failure_more=failure_more or "N/A", # last_docker_build_script=last_docker_build_script or "", - # expected_template=expected_template, + # initial_template=initial_template, # ) # Safety belt: ensure the required fixed template anchors are present. # script = out.docker_build_script.strip() # pyright: ignore[reportAttributeAccessIssue] script = (iter_script or "").strip() logger.debug("DSPy: candidate script preview: %s", _preview(script, 240)) - must_haves = ["cd_asv_json_dir()", "micromamba", "for version in $python_versions; do"] + must_haves = ["###### SETUP CODE (NOT TO BE MODIFIED) ######"] ok_template = all(m in script for m in must_haves) - must_not_haves = ["```", "import IPython", "from IPython", "exit(", "sys.exit("] + must_not_haves = ["```bash", "```", "import IPython", "from IPython"] no_bad = all(m not in script for m in must_not_haves) - if (not ok_template) or (not no_bad): - logger.warning("DSPy: template anchors missing; falling back to provided template") - script = expected_template + if not ok_template: + raise RuntimeError(f"Generated script is missing required template anchors: {must_haves}") + if not no_bad: + raise RuntimeError(f"Generated script contains disallowed fragments: {must_not_haves}") logger.info("DSPy: finalized script length=%d", len(script)) assert isinstance(script, str), "type mismatch" # noqa: S101 return script @@ -258,27 +278,32 @@ def synthesize_script( merged_log = _merge_tail(stderr_tail, stdout_tail) logger.debug("synthesize_script: merged_log_len=%d", len(merged_log)) - script = program( - owner_repo=f"{task.owner}/{task.repo}", - sha=task.sha, - commit_date=_ts_to_iso(getattr(task, "commit_date", None)), - stderr_logs=stderr_tail or "", - stdout_logs=stdout_tail or "", - 
failure_more=failure_more or "N/A", - last_docker_build_script=last_script or "", - expected_template=building_template, - repo_facts_json=tool_exec.facts_json(), - tool_executor=tool_exec, - max_steps=max_steps, - ) - script = str(script) - logger.info("synthesize_script: script length=%d", len(script)) + try: + script = program( + owner_repo=f"{task.owner}/{task.repo}", + sha=task.sha, + commit_date=_ts_to_iso(getattr(task, "commit_date", None)), + stderr_logs=stderr_tail or "", + stdout_logs=stdout_tail or "", + failure_more=failure_more or "N/A", + last_docker_build_script=last_script or "", + initial_template=building_template, + repo_facts_json=tool_exec.facts_json(), + tool_executor=tool_exec, + max_steps=max_steps, + ) + script = str(script) + logger.info("synthesize_script: script length=%d", len(script)) + except Exception: + logger.exception("synthesize_script: error=%s") + return "" + return script def build_once_with_context( client: docker.DockerClient, - image_name: str, + task: Task, context: DockerContext, repo_url: str, sha: str, @@ -287,8 +312,9 @@ def build_once_with_context( tail_chars: int, probe: bool = False, pull: bool = False, + force: bool = True, ) -> BuildResult: - logger.info("build_once_with_context: registering context key=%s", image_name) + logger.info("build_once_with_context: registering context key=%s", task.get_image_name()) logger.debug( "build_once_with_context: build args: REPO_URL=%s, COMMIT_SHA=%s, timeout_s=%s, tail_chars=%s, pull=%s", repo_url, @@ -297,12 +323,13 @@ def build_once_with_context( tail_chars, pull, ) + res = context.build_container_streaming( client=client, - image_name=image_name, + image_name=task.get_image_name(), build_args={"REPO_URL": repo_url, "COMMIT_SHA": sha}, probe=probe, - force=True, + force=force, timeout_s=timeout_s, tail_chars=tail_chars, pull=pull, @@ -332,18 +359,19 @@ def agent_build_and_validate( Saves attempt pickles and final pickle on success. 
""" assert task.sha is not None, "task.sha must be set" # noqa: S101 - other_contexts = context_registry.get_similar(task) - logger.info("agent_build_and_validate: found %d similar contexts", len(other_contexts)) - if len(other_contexts) >= 1: - _, most_similar_ctx = other_contexts[0] - default_building_data = most_similar_ctx.building_data + default_building_template = context_registry.get_default(tag="env")[1].building_data + if len(similar_contexts := context_registry.get_similar(task.with_tag("env"))) > 0: + _, context = similar_contexts[0] + logger.info( + "build_once_with_context: found %d similar contexts; using most similar with key=%s", + len(similar_contexts), + str(context), + ) + first_guess = context.building_data else: - _, most_similar_ctx = context_registry.get_default() - default_building_data = context_registry["asvprobe/default/default"].building_data - - import IPython - - IPython.embed() + _, context = context_registry.get_default(tag="env") + logger.info("build_once_with_context: no similar context found; using default with key=%s", str(context)) + first_guess = default_building_template logger.info( "agent_build_and_validate: start for %s/%s@%s (max_attempts=%d)", task.owner, task.repo, task.sha, max_attempts @@ -351,57 +379,47 @@ def agent_build_and_validate( program = BuildScriptProgram() - image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() + # image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() + repo_url = f"https://www.github.com/{task.owner}/{task.repo}" - logger.debug("agent_build_and_validate: image_name=%s repo_url=%s", image_name, repo_url) + logger.debug("agent_build_and_validate: task=%s repo_url=%s", task, repo_url) # build probe. 
- probe_image_name = image_name.replace("asv/", "asvprobe/") - if not client.images.list(name=probe_image_name): + if not client.images.list(name=task.with_tag("env").get_image_name()): logger.info("agent_build_and_validate: probe image not found, building probe image") - probe_res = build_once_with_context( + env_res = build_once_with_context( client=client, - image_name=probe_image_name, - context=most_similar_ctx, + task=task.with_tag("env"), + context=context, repo_url=repo_url, sha=task.sha, timeout_s=args.build_timeout, tail_chars=args.tail_chars, probe=True, pull=True, + force=False, # If the env is already present, don't rebuild (saves time) ) - if not probe_res.ok: + if not env_res.ok: logger.warning("agent_build_and_validate: probe build failed; something is wrong with Dockerfile") raise RuntimeError("probe build failed; check Dockerfile.") tool_exec = ContainerToolExecutor( docker_client=client, - image_name=probe_image_name, - container_name=probe_image_name.replace("/", "-"), + image_name=task.with_tag("env").get_image_name(), + container_name=task.with_tag("env").get_container_name(), workdir="/workspace/repo/", ) try: attempts: list[AttemptRecord] = [] - prior_script = "" # empty on attempt #1 # Attempt loop - for i in range(1, max_attempts + 1): + for i in range(max_attempts + 1): logger.info("agent_build_and_validate: attempt %d/%d", i, max_attempts) - if i == 1: + if i == 0: failure_more = "N/A" - script = synthesize_script( - program, - task, - prior_script, - stderr_tail="", - stdout_tail="", - building_template=default_building_data, - failure_more=failure_more, - tool_exec=tool_exec, - max_steps=args.max_steps, - ) + script = first_guess else: last = attempts[-1].build_result stderr_tail = (last.stderr_tail if last else "") or "" @@ -416,40 +434,55 @@ def agent_build_and_validate( len(stdout_tail), failure_more, ) - script = synthesize_script( - program, - task, - attempts[-1].building_data, - stderr_tail=stderr_tail, - 
stdout_tail=stdout_tail, - building_template=default_building_data, - failure_more=failure_more, - tool_exec=tool_exec, - max_steps=args.max_steps, - ) + try: + script = synthesize_script( + program, + task, + attempts[-1].building_data, + stderr_tail=stderr_tail, + stdout_tail=stdout_tail, + building_template=default_building_template, + failure_more=failure_more, + tool_exec=tool_exec, + max_steps=args.max_steps, + ) + except Exception as e: + logger.error("agent_build_and_validate: synthesis error: %s", e, exc_info=True) + build_res = BuildResult( + ok=False, + image_id=None, + image_name=task.with_tag("pkg").get_image_name(), + rc=1, + duration_s=0.0, + stderr_tail=str(e), + stdout_tail="", + ) + attempts.append(AttemptRecord(attempt_idx=i, building_data="", build_result=build_res)) + break # exit attempt loop ctx = DockerContext(building_data=script) - with context_registry.get_lock(): - context_registry.register(image_name, ctx) - # Save attempt pickle - attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" - _save_pickle(ctx, attempt_pickle) + if i >= 1: + attempt_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-attempt-{i}.pkl" + _save_pickle(ctx, attempt_pickle) # Build - logger.info("agent_build_and_validate: building image '%s'", image_name) + logger.info("agent_build_and_validate: building image '%s'", task.get_image_name()) build_res = build_once_with_context( client=client, - image_name=image_name, + task=task.with_tag("pkg"), context=ctx, repo_url=repo_url, sha=task.sha, timeout_s=args.build_timeout, - tail_chars=args.tail_chars, + tail_chars=args.tail_chars * 2, + force=True, # Always rebuild package image to pick up new script ) attempts.append(AttemptRecord(attempt_idx=i, building_data=script, build_result=build_res)) if build_res.ok: + with context_registry.get_lock(): + context_registry.register(task.with_tag("pkg"), ctx) # import_works = False # import_check_res = None # try: @@ -480,7 +513,7 
@@ def agent_build_and_validate( final_pickle = args.output_dir / f"{task.owner}-{task.repo}-{task.sha}-final.pkl" _save_pickle(ctx, final_pickle) logger.info("agent_build_and_validate: build succeeded; starting validation run") - result = validate_one(task, args, client, context_registry, machine_defaults) + result = validate_one(task.with_tag("pkg"), args, client, context_registry, machine_defaults) logger.info( "agent_build_and_validate: validation stage=%s ok=%s rc=%s", result.get("stage"), @@ -511,12 +544,12 @@ def agent_build_and_validate( # All attempts failed last = attempts[-1].build_result - logger.error("agent_build_and_validate: all attempts failed for %s", image_name) + logger.error("agent_build_and_validate: all attempts failed for %s", task.with_tag("pkg").get_image_name()) return { "owner": task.owner, "repo": task.repo, "sha": task.sha, - "image_name": image_name, + "image_name": task.with_tag("pkg").get_image_name(), "stage": "build", "ok": False, "rc": (last.rc if last else 1), diff --git a/src/datasmith/agents/perf_judge.py b/src/datasmith/agents/perf_judge.py index 4fb09c1..e2f5ccd 100644 --- a/src/datasmith/agents/perf_judge.py +++ b/src/datasmith/agents/perf_judge.py @@ -92,11 +92,13 @@ class JudgeSignature(dspy.Signature): - Startup/import time reductions, memory reductions, fewer allocations, less I/O, fewer syscalls. - Fixing a **speed regression** or a change whose *intent* is “speed up”. - Behavior changes **explicitly** framed as speeding things up (e.g., “non-blocking requests (speed-up …)”). + - New feature geared towards performance with test cases. ### Do **NOT** count (label **NO**) unless the message clearly states product runtime gets faster: - - Test/bench/ASV/perf-test changes; thresholds; CI; coverage; Makefile/tox/pre-commit; refactors “for tests”. - - Merges, version bumps, housekeeping (“tidy”), or ambiguous “attempt to fix perf tests”. 
+ - Purely Test/bench/ASV/perf-test changes; thresholds; CI; coverage; Makefile/tox/pre-commit; refactors “for tests”. + - Purely merges, version bumps, housekeeping (“tidy”), or ambiguous “attempt to fix perf tests”. - Pure UX frequency changes with “no measurable reduction in speed”. + - Pure documentation changes. ### Tie-breaker (recall-first) If ambiguous but plausibly about product/runtime performance, prefer **YES**. Only choose **NO** when it clearly applies solely to tests/infra or non-runtime concerns. @@ -113,6 +115,10 @@ class JudgeSignature(dspy.Signature): """ message = dspy.InputField(desc="A single commit message string.") + file_change_summary = dspy.InputField( + desc="A markdown table summarizing all the files changed in the commit along with lines added/removed.", + default="", + ) debug_json = dspy.OutputField( desc="JSON dump of the model's internal state, useful for debugging.", default=None, @@ -124,8 +130,8 @@ def __init__(self) -> None: super().__init__() self.predict = dspy.Predict(JudgeSignature) - def forward(self, message: str) -> dspy.Prediction: - prediction = self.predict(message=message) + def forward(self, message: str, file_change_summary: str) -> dspy.Prediction: + prediction = self.predict(message=message, file_change_summary=file_change_summary) out: str = prediction.get("debug_json", None) # pyright: ignore[reportAttributeAccessIssue] try: data = json.loads(out) @@ -153,43 +159,43 @@ def __init__(self) -> None: super().__init__() self.judge = LLMJudge() - def forward(self, message: str) -> dspy.Prediction: - prior_label, prior_conf, prior_flags = heuristic_prior(message) - if prior_label is True and prior_conf >= 55: - result = { - "label": "YES", - "reason": "Positive performance cues in message.", - "confidence": prior_conf, - "flags": prior_flags, - } - return dspy.Prediction(json=json.dumps(result)) + def forward(self, message: str, file_change_summary: str = "") -> dspy.Prediction: + # prior_label, prior_conf, 
prior_flags = heuristic_prior(message) + # if prior_label is True and prior_conf >= 55: + # result = { + # "label": "YES", + # "reason": "Positive performance cues in message.", + # "confidence": prior_conf, + # "flags": prior_flags, + # } + # return dspy.Prediction(json=json.dumps(result)) # Ask LLM judge - judged = json.loads(self.judge(message=message).json) # pyright: ignore[reportAttributeAccessIssue] + judged = json.loads(self.judge(message=message, file_change_summary=file_change_summary).json) # pyright: ignore[reportAttributeAccessIssue] - tests_only = "tests-only" in prior_flags or "tests-only" in judged.get("flags", []) + tests_only = "tests-only" in judged.get("flags", []) if judged["label"] == "YES": return dspy.Prediction(json=json.dumps(judged)) - if prior_label is True and not tests_only: - judged["label"] = "YES" - judged["reason"] = "Recall-first override: positive perf hints." - judged["confidence"] = max(judged["confidence"], 60) - judged["flags"] = list(dict.fromkeys(judged.get("flags", []) + prior_flags + ["ambiguous"])) - return dspy.Prediction(json=json.dumps(judged)) + # if prior_label is True and not tests_only: + # judged["label"] = "YES" + # judged["reason"] = "Recall-first override: positive perf hints." + # judged["confidence"] = max(judged["confidence"], 60) + # judged["flags"] = list(dict.fromkeys(judged.get("flags", []) + prior_flags + ["ambiguous"])) + # return dspy.Prediction(json=json.dumps(judged)) # Otherwise respect NO (or explicit tests-only) if tests_only: judged["label"] = "NO" judged["reason"] = "Tests/bench/infra-only message." 
- judged["confidence"] = max(judged["confidence"], prior_conf, 70) - judged["flags"] = list(dict.fromkeys(judged.get("flags", []) + prior_flags + ["infra"])) + judged["confidence"] = max(judged["confidence"], 70) + judged["flags"] = list(dict.fromkeys([*judged.get("flags", []), "infra"])) return dspy.Prediction(json=json.dumps(judged)) - def get_response(self, message: str) -> tuple[bool, str]: + def get_response(self, message: str, file_change_summary: str = "") -> tuple[bool, str]: """ Get the label for a commit message. """ - json_str = self(message=message).json # pyright: ignore[reportAttributeAccessIssue] + json_str = self(message=message, file_change_summary=file_change_summary).json # pyright: ignore[reportAttributeAccessIssue] response = json.loads(json_str) return (response["label"] == "YES", json_str) diff --git a/src/datasmith/agents/tool_executor.py b/src/datasmith/agents/tool_executor.py index c883760..a832c66 100644 --- a/src/datasmith/agents/tool_executor.py +++ b/src/datasmith/agents/tool_executor.py @@ -58,6 +58,22 @@ def choose_action(self, action: str, action_input: str) -> str: observation = self.exec_read_file(action_input) elif action == "try_import": observation = self.exec_try_import(action_input) + elif action == "exec_arbitrary": + # careful, this is arbitrary code execution! + cmd = action_input.strip().split("\n")[0][:200] + if not cmd: + observation = "[exec_arbitrary] missing command" + else: + res = self._pc.exec(cmd, timeout_s=30) + stdout_snip = ( + (res.stdout[:1000] + "..." + res.stdout[-1000:]) if len(res.stdout) > 2000 else res.stdout + ) + stderr_snip = ( + (res.stderr[:1000] + "..." 
+ res.stderr[-1000:]) if len(res.stderr) > 2000 else res.stderr + ) + observation = ( + f"[exec_arbitrary] rc={res.rc}\n--- STDOUT ---\n{stdout_snip}\n--- STDERR ---\n{stderr_snip}" + ) else: observation = f"[noop] Unknown action '{action}'" except Exception as e: diff --git a/src/datasmith/docker/Dockerfile b/src/datasmith/docker/Dockerfile index 6301d1a..95edbc2 100644 --- a/src/datasmith/docker/Dockerfile +++ b/src/datasmith/docker/Dockerfile @@ -1,7 +1,10 @@ -FROM buildpack-deps:jammy +# syntax=docker/dockerfile:1.7 + +FROM buildpack-deps:jammy AS base ARG REPO_URL ARG COMMIT_SHA + RUN apt-get update && \ apt-get install -y --no-install-recommends \ curl git build-essential jq cmake ninja-build && \ @@ -25,15 +28,31 @@ RUN micromamba install -y -p $MAMBA_ROOT_PREFIX -c conda-forge \ RUN mkdir -p /workspace /output WORKDIR /workspace +FROM base AS env +ARG REPO_URL +ARG COMMIT_SHA + +# Entrypoint is inherited by pkg COPY entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh +ENTRYPOINT ["/entrypoint.sh"] -RUN git clone ${REPO_URL} /workspace/repo +RUN git clone "$REPO_URL" /workspace/repo WORKDIR /workspace/repo -RUN git checkout ${COMMIT_SHA} +RUN git checkout "$COMMIT_SHA" +LABEL vcs.url="$REPO_URL" vcs.ref="$COMMIT_SHA" -COPY docker_build.sh /workspace/repo/docker_build.sh -RUN chmod +x /workspace/repo/docker_build.sh -RUN /workspace/repo/docker_build.sh +# ---- ENV STAGE: create envs, persist vars, install smoke tool ---- +COPY docker_build_env.sh /workspace/repo/docker_build_env.sh +RUN chmod +x /workspace/repo/docker_build_env.sh && \ + /workspace/repo/docker_build_env.sh -ENTRYPOINT ["/entrypoint.sh"] + +FROM env AS pkg + +# ---- PKG STAGE: build+test the package for each ASV Python ---- +COPY docker_build_pkg.sh /workspace/repo/docker_build_pkg.sh +RUN chmod +x /workspace/repo/docker_build_pkg.sh && \ + /workspace/repo/docker_build_pkg.sh +# If you want to restrict to one version at build time, replace with: +# RUN PY_VERSION=3.10 
/workspace/repo/docker_build_pkg.sh diff --git a/src/datasmith/docker/context.py b/src/datasmith/docker/context.py index 63528e2..b435dcc 100644 --- a/src/datasmith/docker/context.py +++ b/src/datasmith/docker/context.py @@ -4,6 +4,7 @@ import datetime import io import json +import re import tarfile import threading import time @@ -11,7 +12,7 @@ from collections.abc import Mapping from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import Any, ClassVar import docker from docker.errors import APIError, DockerException, ImageNotFound @@ -39,7 +40,59 @@ class Task: repo: str sha: str | None = None commit_date: float = 0.0 - kind: str = "asv" + tag: str = "pkg" # 'pkg' (env + package) or 'env' (env-only) + + @staticmethod + def _sanitize_component(s: str) -> str: + """ + Sanitize a component for Docker image/container naming: + - lowercase + - keep only [a-z0-9._-] + - collapse invalid runs to '-' + - strip leading/trailing separators + """ + s = s.lower() + s = re.sub(r"[^a-z0-9._-]+", "-", s) + s = s.strip("._-") + return s or "unknown" + + def with_tag(self, tag: str) -> Task: + """Return a new Task with the given tag.""" + if tag not in {"env", "pkg"}: + raise ValueError(f"Tag must be either 'env' or 'pkg', got '{tag}'.") + return Task(owner=self.owner, repo=self.repo, sha=self.sha, commit_date=self.commit_date, tag=tag) + + def get_image_name(self) -> str: + """Return the Docker image name for this task (repo:tag).""" + assert self.tag in {"env", "pkg"}, "Tag must be either 'env' or 'pkg'." 
# noqa: S101 + + owner = self._sanitize_component(self.owner) + repo = self._sanitize_component(self.repo) + sha_part = f"-{self._sanitize_component(self.sha)}" if self.sha else "" + + # New scheme: "owner-repo[-sha]:{tag}" + image_repo = f"{owner}-{repo}{sha_part}" + return f"{image_repo}:{self.tag}" + + def get_container_name(self) -> str: + """Return a suitable (deterministic) Docker container name for this task.""" + assert self.tag in {"env", "pkg"}, "Tag must be either 'env' or 'pkg'." # noqa: S101 + + owner = self._sanitize_component(self.owner) + repo = self._sanitize_component(self.repo) + sha_part = f"-{self._sanitize_component(self.sha)}" if self.sha else "" + tag_part = f"-{self._sanitize_component(self.tag)}" + + # Container names cannot contain ':'; allowed: [a-zA-Z0-9][a-zA-Z0-9_.-] + # We keep it lowercase and deterministic. + name = f"{owner}-{repo}{sha_part}{tag_part}" + + # Ensure starts with an alphanumeric character + if not re.match(r"^[a-z0-9]", name): + name = f"c-{name}" + + # Be conservative on length (Docker allows long names, but trim to 128 chars) + return name[:128] class DockerContext: @@ -53,33 +106,33 @@ class DockerContext: default_dockerfile_loc = Path(__file__).parent / "Dockerfile" default_entrypoint_loc = Path(__file__).parent / "entrypoint.sh" - default_builder_loc = Path(__file__).parent / "docker_build.sh" - default_probe_loc = Path(__file__).parent / "probe_build.sh" + default_docker_build_env_loc = Path(__file__).parent / "docker_build_env.sh" + default_docker_build_pkg_loc = Path(__file__).parent / "docker_build_pkg.sh" dockerfile_data: str entrypoint_data: str + env_building_data: str building_data: str - probing_data: str def __init__( self, building_data: str | None = None, dockerfile_data: str | None = None, entrypoint_data: str | None = None, - probing_data: str | None = None, + env_building_data: str | None = None, ) -> None: - if building_data is None: - building_data = self.default_builder_loc.read_text() if 
dockerfile_data is None: dockerfile_data = self.default_dockerfile_loc.read_text() if entrypoint_data is None: entrypoint_data = self.default_entrypoint_loc.read_text() - if probing_data is None: - probing_data = self.default_probe_loc.read_text() + if env_building_data is None: + env_building_data = self.default_docker_build_env_loc.read_text() + if building_data is None: + building_data = self.default_docker_build_pkg_loc.read_text() - self.building_data = building_data self.dockerfile_data = dockerfile_data self.entrypoint_data = entrypoint_data - self.probing_data = probing_data + self.env_building_data = env_building_data + self.building_data = building_data def build_tarball_stream(self, probe: bool = False) -> io.BytesIO: tar_stream = io.BytesIO() @@ -97,17 +150,31 @@ def build_tarball_stream(self, probe: bool = False) -> io.BytesIO: entrypoint_info.mode = 0o755 # Make it executable tar.addfile(entrypoint_info, io.BytesIO(entrypoint_data)) - # Add docker_build.sh - building_data = self.probing_data.encode("utf-8") if probe else self.building_data.encode("utf-8") - builder_info = tarfile.TarInfo(name="docker_build.sh") - builder_info.size = len(building_data) - builder_info.mode = 0o755 # Make it executable - tar.addfile(builder_info, io.BytesIO(building_data)) + # Add docker_build_env.sh + env_building_data = self.env_building_data.encode("utf-8") + env_building_info = tarfile.TarInfo(name="docker_build_env.sh") + env_building_info.size = len(env_building_data) + env_building_info.mode = 0o755 # Make it executable + tar.addfile(env_building_info, io.BytesIO(env_building_data)) + + if not probe: + # Add docker_build_pkg.sh + building_data = self.building_data.encode("utf-8") + building_info = tarfile.TarInfo(name="docker_build_pkg.sh") + building_info.size = len(building_data) + building_info.mode = 0o755 # Make it executable + tar.addfile(building_info, io.BytesIO(building_data)) # Reset the stream position to the beginning tar_stream.seek(0) return 
tar_stream + def process_image_name(self, image_name: str) -> tuple[str, str]: + """Split image name into (repo, target). Target is required.""" + assert ":" in image_name and image_name.rsplit(":", 1)[1], "Image name must include a ':target' suffix." # noqa: S101 + repo, target = image_name.rsplit(":", 1) + return repo, target + def build_container( self, client: docker.DockerClient, @@ -117,6 +184,7 @@ def build_container( probe: bool = False, ) -> None: """Builds the Docker image if it does not exist or if force is True.""" + _, target = self.process_image_name(image_name) image_exists = False try: image = client.images.get(image_name) @@ -141,6 +209,7 @@ def build_container( custom_context=True, tag=image_name, buildargs=build_args, + target=target, ) except DockerException: logger.exception("Failed to build Docker image '%s'", image_name) @@ -168,6 +237,7 @@ def build_container_streaming( # noqa: C901 Returns a BuildResult and does NOT raise for typical failures (so callers can report immediately). 
""" + _, target = self.process_image_name(image_name) t0 = time.time() try: # Fast path: respect existing image when not forcing @@ -212,6 +282,7 @@ def build_container_streaming( # noqa: C901 decode=True, rm=True, pull=pull, + target=target, ) except DockerException: logger.exception("Failed to initiate build for '%s'", image_name) @@ -299,6 +370,7 @@ def to_dict(self) -> dict[str, str]: "dockerfile_data": self.dockerfile_data, "entrypoint_data": self.entrypoint_data, "building_data": self.building_data, + "env_building_data": self.env_building_data, } @classmethod @@ -310,12 +382,20 @@ def from_dict(cls, data: Mapping[str, Any]) -> DockerContext: return cls( dockerfile_data=data.get("dockerfile_data"), entrypoint_data=data.get("entrypoint_data"), - building_data=data.get("building_data"), + building_data=data.get("building_data", None), + env_building_data=data.get("env_building_data", None), ) class ContextRegistry: - """Registry for Docker contexts to avoid rebuilding the same context multiple times.""" + """Registry for Docker contexts keyed by owner/repo[/sha], independent of tag. + + Input key format (required): "owner/repo[/sha]:{tag}", where {tag} ∈ {"env","pkg"}. + The `tag` is validated and preserved on returned `Task`s, but **ignored for storage**; + all contexts are stored under a canonical key with tag='pkg'. 
+ """ + + VALID_TAGS: ClassVar[set[str]] = {"env", "pkg"} def __init__(self, registry: dict[Task, DockerContext] | None = None, default_context: DockerContext | None = None): if registry is None: @@ -326,43 +406,54 @@ def __init__(self, registry: dict[Task, DockerContext] | None = None, default_co if default_context is None: default_context = DockerContext() - # ensure a default context for BOTH namespaces - for k in ("asv", "asvprobe"): - t = Task(owner="default", repo="default", sha=None, kind=k) - if t not in self.registry: - self.registry[t] = default_context - logger.debug("Default Docker contexts initialized (asv + asvprobe).") - - def get_default(self, kind: str = "asv") -> tuple[Task, DockerContext]: - task = Task(owner="default", repo="default", sha=None, kind=kind) - return task, self.registry[task] + # Single default context (canonicalized to tag='pkg') + default_task_canonical = Task(owner="default", repo="default", sha=None, tag="pkg") + if default_task_canonical not in self.registry: + self.registry[default_task_canonical] = default_context + logger.debug("Default Docker context initialized (single canonical context).") + + @staticmethod + def _canonicalize(task: Task) -> Task: + """Return a copy of Task with tag='pkg' for registry keying.""" + if task.tag == "pkg": + return task + return task.with_tag("pkg") + + def _canonicalize_from_key(self, key: str | Task) -> Task: + """Parse if needed, then canonicalize to tag='pkg' for dict keying.""" + t = self.parse_key(key) if isinstance(key, str) else key + return self._canonicalize(t) + + def get_default(self, tag: str = "pkg") -> tuple[Task, DockerContext]: + if tag not in self.VALID_TAGS: + raise ValueError(f"Unknown tag '{tag}'. 
Valid tags: {sorted(self.VALID_TAGS)}") + # lookup under canonical default; return Task with requested tag + user_task = Task(owner="default", repo="default", sha=None, tag=tag) + canonical = self._canonicalize(user_task) + return user_task, self.registry[canonical] def get_lock(self) -> threading.Lock: return self._lock - def parse_key(self, key: str) -> Task: - """Parse a string key into a Task object (now preserving 'asv' vs 'asvprobe').""" - if not ( - key.startswith("asv/") - or key.startswith("asv/default") - or key.startswith("asvprobe/") - or key.startswith("asvprobe/default") - ): - raise ValueError("Key must start with 'asv/' or 'asv/default' or 'asvprobe/' or 'asvprobe/default'") - - # Handle defaults like "asv/default-" and "asvprobe/default-" - if key.startswith("asv/default") or key.startswith("asvprobe/default"): - kind = "asvprobe" if key.startswith("asvprobe/") else "asv" - parts = key.split("-") - repo = parts[-1] if len(parts) > 2 else "default" - return Task(owner="default", repo=repo, sha=None, commit_date=0.0, kind=kind) - - parts = key.split("/") - if parts[0] not in ("asv", "asvprobe") or not (3 <= len(parts) <= 4): - raise ValueError("Key must be 'asv/owner/repo[/sha]' or 'asvprobe/owner/repo[/sha]'") - - kind, owner, repo = parts[0], parts[1], parts[2] - sha = None if len(parts) != 4 else parts[3] + def parse_key(self, key: str | Task) -> Task: + """Parse 'owner/repo[/sha]:{tag}' into a Task. Tag is required and validated.""" + if isinstance(key, Task): + return key # already parsed + + # Hard assertion per request: all keys MUST include a ':tag' + assert ":" in key and key.rsplit(":", 1)[1], "All keys must include a ':tag' suffix (e.g., ':env' or ':pkg')." # noqa: S101 + + prefix, tag = key.rsplit(":", 1) + tag = tag.strip() + if tag not in self.VALID_TAGS: + raise ValueError(f"Unknown tag '{tag}'. 
Valid tags: {sorted(self.VALID_TAGS)}") + + parts = prefix.split("/") + if not (2 <= len(parts) <= 3): + raise ValueError("Key must be 'owner/repo[:tag]' or 'owner/repo/sha[:tag]'") + + owner, repo = parts[0], parts[1] + sha = None if len(parts) != 3 else parts[2] date_unix = 0.0 if sha: @@ -375,71 +466,89 @@ def parse_key(self, key: str) -> Task: logger.warning("Failed to fetch commit info for %s/%s@%s: %s", owner, repo, sha, exc) date_unix = 0.0 - return Task(owner=owner, repo=repo, sha=sha, commit_date=date_unix, kind=kind) + return Task(owner=owner, repo=repo, sha=sha, commit_date=date_unix, tag=tag) def register(self, key: str | Task, context: DockerContext) -> None: - """Register a new Docker context.""" - if isinstance(key, str): - key = self.parse_key(key) - if key in self.registry: - logger.warning(f"Context '{key}' is already registered, overwriting.") - self.registry[key] = context - logger.debug(f"Registered Docker context: {key}") + """Register a new Docker context. Stored under canonical (tag='pkg').""" + t = self.parse_key(key) if isinstance(key, str) else key + canonical = self._canonicalize(t) + if canonical in self.registry: + logger.warning(f"Context '{canonical}' is already registered, overwriting.") + + # if the tag is "env" and we already have a "pkg" version, warn the user + # and instead of changing the context completely, overwrite all files + # except the building_data (which is pkg-specific) + if t.tag == "env" and canonical in self.registry: + existing = self.registry[canonical] + context = DockerContext( + dockerfile_data=context.dockerfile_data, + entrypoint_data=context.entrypoint_data, + env_building_data=context.env_building_data, + building_data=existing.building_data, + ) + logger.warning( + f"Registering 'env' context for '{canonical}' which already has a 'pkg' version; preserving 'pkg' building_data." 
+ ) + self.registry[canonical] = context + logger.debug(f"Registered Docker context under canonical key: {canonical}") def get(self, key: str | Task) -> DockerContext: """ - Retrieve a Docker context by key using hierarchical matching. - "asv/astropy/astropy/14134" should query these queries in-order: - "asv/astropy/astropy/14134" - "asv/astropy/astropy" + Retrieve a Docker context by key using hierarchical matching (tag-insensitive). + 'owner/repo/sha:tag' queries in-order: + 1) owner/repo/sha (canonical key, tag='pkg') + 2) owner/repo (canonical key, tag='pkg') + 3) default (canonical key, tag='pkg') """ - if isinstance(key, str): - key = self.parse_key(key) + # Keep the user's tag but look up under canonical keys + user_task = self.parse_key(key) if isinstance(key, str) else key + canonical = self._canonicalize(user_task) - # exact match first - if key.sha is not None and key in self.registry: - logger.debug(f"Found exact context for key '{key}'.") - return self.registry[key] + # exact match first (canonical) + if canonical.sha is not None and canonical in self.registry: + logger.debug(f"Found exact context for key '{user_task}' via '{canonical}'.") + return self.registry[canonical] - # owner/repo base (same namespace!) - base = Task(owner=key.owner, repo=key.repo, sha=None, kind=key.kind) + # owner/repo base (canonical) + base = Task(owner=canonical.owner, repo=canonical.repo, sha=None, tag="pkg") if base in self.registry: - logger.debug(f"Found fallback context '{base}' for key '{key}'.") + logger.debug(f"Found fallback context '{base}' for key '{user_task}'.") return self.registry[base] - logger.info(f"No context found for key '{key}'. Using default context for namespace '{key.kind}'.") - return self.registry[Task(owner="default", repo="default", sha=None, kind=key.kind)] + logger.info(f"No context found for key '{user_task}'. 
Using default context.") + return self.registry[Task(owner="default", repo="default", sha=None, tag="pkg")] def get_similar(self, key: str | Task) -> list[tuple[Task, DockerContext]]: # noqa: C901 """ - Retrieve a list of Docker contexts by key using hierarchical matching. - "asv/astropy/astropy/14134" should return contexts for these queries in-order: - 1) "asv/astropy/astropy/14134" (exact match, if present) - 2) Any others starting with "asv/astropy/astropy/" (e.g., "asv/astropy/astropy/abcdef") - sorted by abs(key.commit_date / candidate.commit_date) if key.commit_date is not None else alphabetically - 3) "asv/astropy/astropy" (owner/repo base, if present) - Keys like "asv/astropy/otherrepo*" or "asv/otherowner/*" must NOT match. + Retrieve contexts similar to a key, constrained to SAME owner/repo (tag-insensitive). + Order: + 1) exact match (if present) — returned Task uses the caller's tag + 2) other SHAs for owner/repo — returned Tasks use the caller's tag + sorted by |commit_date diff| if available, else by SHA + 3) base owner/repo — returned Task uses the caller's tag """ - if isinstance(key, str): - key = self.parse_key(key) + user_task = self.parse_key(key) if isinstance(key, str) else key + canonical = self._canonicalize(user_task) results: list[tuple[Task, DockerContext]] = [] - seen: set[Task] = set() + seen_canonical: set[Task] = set() # 1) Exact match (if present) - if key in self.registry: - results.append((key, self.registry[key])) - seen.add(key) + if canonical in self.registry: + results.append((canonical.with_tag(user_task.tag), self.registry[canonical])) + seen_canonical.add(canonical) - # 2) Other SHAs for same owner/repo *in the same namespace* + # 2) Other SHAs for same owner/repo (canonical keys in registry) candidates: list[tuple[Task, DockerContext]] = [] for t, ctx in self.registry.items(): - if t in seen: + if t in seen_canonical: continue - if t.kind == key.kind and t.owner == key.owner and t.repo == key.repo and t.sha is not None: + 
if t.owner == canonical.owner and t.repo == canonical.repo and t.sha is not None: candidates.append((t, ctx)) - has_valid_commit_date = getattr(key, "sha", None) is not None and getattr(key, "commit_date", None) is not None + has_valid_commit_date = ( + getattr(canonical, "sha", None) is not None and getattr(canonical, "commit_date", None) is not None + ) if has_valid_commit_date: def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: @@ -448,7 +557,7 @@ def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: if cand_cd is None: return (float("inf"), str(t.sha)) try: - return (abs(key.commit_date - cand_cd), str(t.sha)) + return (abs(canonical.commit_date - cand_cd), str(t.sha)) except Exception: return (float("inf"), str(t.sha)) @@ -457,14 +566,15 @@ def _sort(item: tuple[Task, DockerContext]) -> tuple[float, str]: candidates.sort(key=lambda item: str(item[0].sha)) for t, ctx in candidates: - if t not in seen: - results.append((t, ctx)) - seen.add(t) + if t not in seen_canonical: + # Present with the user's tag for downstream execution behavior + results.append((t.with_tag(user_task.tag), ctx)) + seen_canonical.add(t) - # 3) Base owner/repo for the same namespace - base = Task(owner=key.owner, repo=key.repo, sha=None, kind=key.kind) - if base in self.registry and base not in seen: - results.append((base, self.registry[base])) + # 3) Base owner/repo + base = Task(owner=canonical.owner, repo=canonical.repo, sha=None, tag="pkg") + if base in self.registry and base not in seen_canonical: + results.append((base.with_tag(user_task.tag), self.registry[base])) return results @@ -475,9 +585,8 @@ def __setitem__(self, key: str, context: DockerContext) -> None: self.register(key, context) def __contains__(self, key: str | Task) -> bool: - if isinstance(key, str): - key = self.parse_key(key) - return key in self.registry + canonical = self._canonicalize_from_key(key) + return canonical in self.registry def save_to_file(self, path: Path) -> None: dat = 
self.serialize(pretty=True) @@ -492,13 +601,12 @@ def load_from_file(cls, path: Path) -> ContextRegistry: def serialize(self, *, pretty: bool = False) -> str: """ - Serialize the registry (including the 'default' context) to a JSON string. - The thread lock itself is not serialized; a fresh lock will be created - when deserializing. + Serialize the registry (including the canonical 'default' context) to a JSON string. + The thread lock itself is not serialized; a fresh lock will be created when deserializing. """ with self._lock: payload = { - "version": 1, + "version": 2, # bumped: tag-insensitive storage "contexts": {repr(k): v.to_dict() for k, v in self.registry.items()}, } return json.dumps(payload, indent=2 if pretty else None, sort_keys=pretty) @@ -507,14 +615,31 @@ def serialize(self, *, pretty: bool = False) -> str: def deserialize(cls, payload: str) -> ContextRegistry: """ Reconstruct a ContextRegistry from a JSON string produced by `serialize`. - Ensures a 'default' context exists even if it wasn't present in the payload. + Ensures a canonical 'default' context exists. 
""" data = json.loads(payload) raw = data.get("contexts", {}) registry: dict[Task, DockerContext] = {eval(k): DockerContext.from_dict(v) for k, v in raw.items()} # noqa: S307 - # Ensure 'default' exists: - if "default" not in registry: - registry[Task(owner="default", repo="default", sha=None)] = DockerContext() + # Ensure canonical default exists + default_task_canonical = Task(owner="default", repo="default", sha=None, tag="pkg") + if default_task_canonical not in registry: + registry[default_task_canonical] = DockerContext() + + # Normalize any accidentally stored 'env' keys to canonical 'pkg' + # (in case old payloads had per-tag entries) + to_move: list[tuple[Task, DockerContext]] = [] + for t, ctx in list(registry.items()): + if t.tag != "pkg": + to_move.append((t, ctx)) + if to_move: + logger.warning( + "ContextRegistry.deserialize: Found %d non-canonical entries with tag!='pkg'; normalizing to tag='pkg'.", + len(to_move), + ) + for t, ctx in to_move: + del registry[t] + canonical = Task(owner=t.owner, repo=t.repo, sha=t.sha, commit_date=t.commit_date, tag="pkg") + registry[canonical] = ctx return cls(registry=registry) diff --git a/src/datasmith/docker/docker_build.sh b/src/datasmith/docker/docker_build.sh deleted file mode 100644 index d093a36..0000000 --- a/src/datasmith/docker/docker_build.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash -cd_asv_json_dir() { - local match - match=$(find . -type f -name "asv.*.json" | head -n 1) - - if [[ -n "$match" ]]; then - local dir - dir=$(dirname "$match") - cd "$dir" || echo "Failed to change directory to $dir" - else - echo "No 'asv.*.json' file found in current directory or subdirectories." - fi -} -eval "$(micromamba shell hook --shell=bash)" -micromamba activate base - -ROOT_PATH=${PWD} -cd_asv_json_dir || exit 1 -CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)") -if [[ -z "$CONF_NAME" ]]; then - echo "No 'asv.*.json' file found in current directory or subdirectories." 
- exit 1 -fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME - micromamba run -n "asv_${version}" pip install meson-python cython - micromamba run -n "asv_${version}" pip install --verbose --no-build-isolation --editable ${ROOT_PATH} -done diff --git a/src/datasmith/docker/docker_build_env.sh b/src/datasmith/docker/docker_build_env.sh new file mode 100644 index 0000000..23362b9 --- /dev/null +++ b/src/datasmith/docker/docker_build_env.sh @@ -0,0 +1,547 @@ +#!/usr/bin/env bash +set -euo pipefail + +# -------- Helpers installed for all shells -------- +install_profile_helpers() { + cat >/etc/profile.d/asv_utils.sh <<'EOF' +# asv_utils.sh — login/interactive shell helpers for ASV builds +export MAMBA_ROOT_PREFIX="${MAMBA_ROOT_PREFIX:-/opt/conda}" + +# Initialize micromamba for bash shells (no-op if not present) +if command -v micromamba >/dev/null 2>&1; then + eval "$(micromamba shell hook --shell=bash)" +fi + +# Find and cd into the first directory that contains an asv.*.json +cd_asv_json_dir() { + local match + match=$(find . 
-type f -name "asv.*.json" | head -n 1) + if [[ -n "$match" ]]; then + cd "$(dirname "$match")" || echo "Failed to change directory" + else + echo "No 'asv.*.json' file found in current directory or subdirectories." + return 1 + fi +} + +# Return just the conf filename (e.g., asv.conf.json) +asv_conf_name() { + local f + f=$(find . -type f -name "asv.*.json" | head -n 1) + [[ -n "$f" ]] && basename "$f" || return 1 +} + +# Build performance knobs (overridable) +export MAKEFLAGS="${MAKEFLAGS:--j$(nproc)}" +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-$(nproc)}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-$(nproc)}" + +# Shared pip cache to speed repeated editable builds +export PIP_CACHE_DIR="${PIP_CACHE_DIR:-/opt/pipcache}" +mkdir -p "$PIP_CACHE_DIR" +EOF +} + +# -------- Persisted build variables -------- +write_build_vars() { + local py_versions="$1" + local import_name="$2" + + mkdir -p /etc/asv_env + echo "$py_versions" > /etc/asv_env/py_versions + echo "$import_name" > /etc/asv_env/import_name + + # Exported for every future shell (pkg script, interactive, etc.) 
+ cat >/etc/profile.d/asv_build_vars.sh </usr/local/bin/asv_smokecheck.py <<'PY' +#!/usr/bin/env python +import argparse, importlib, pathlib, sys + +def import_and_version(name: str): + m = importlib.import_module(name) + ver = getattr(m, "__version__", "unknown") + print(f"{name} imported ok; __version__={ver}") + +def probe_compiled(name: str, max_ext: int = 10): + m = importlib.import_module(name) + if not hasattr(m, "__path__"): + print("No package __path__ (likely a single-module dist); skipping compiled probe.") + return + pkg_path = pathlib.Path(list(m.__path__)[0]) + so_like = list(pkg_path.rglob("*.so")) + list(pkg_path.rglob("*.pyd")) + failed = [] + for ext in so_like[:max_ext]: + rel = ext.relative_to(pkg_path).with_suffix("") + dotted = ".".join([name] + list(rel.parts)) + try: + importlib.import_module(dotted) + except Exception as e: + failed.append((dotted, str(e))) + if failed: + print("Some compiled submodules failed to import:") + for d, err in failed: + print(" -", d, "->", err) + sys.exit(1) + else: + print("Compiled submodules (if any) import ok") + +def main(): + p = argparse.ArgumentParser() + p.add_argument("--import-name", required=True) + p.add_argument("--repo-root", default=".") + p.add_argument("--pytest-smoke", action="store_true", + help="Run a quick pytest smoke: -k 'not slow' --maxfail=1") + p.add_argument("--max-ext", type=int, default=10) + args = p.parse_args() + + import_and_version(args.import_name) + probe_compiled(args.import_name, max_ext=args.max_ext) + + if args.pytest_smoke: + import subprocess, os + if any((pathlib.Path(args.repo_root)/p).exists() for p in ("tests", "pytest.ini", "pyproject.toml")): + print("Running pytest smoke...") + rc = subprocess.call([sys.executable, "-m", "pytest", "-q", "-k", "not slow", "--maxfail=1"], cwd=args.repo_root) + if rc != 0: + sys.exit(rc) + else: + print("No tests detected; skipping pytest smoke.") + print("Smokecheck OK ✅") + +if __name__ == "__main__": + main() +PY + chmod +x 
/usr/local/bin/asv_smokecheck.py +} +install_smokecheck + +# -------- Install an import-name detector CLI -------- +install_detect_import_name() { + cat >/usr/local/bin/detect_import_name <<'PY' +#!/usr/bin/env python +import argparse, pathlib, re, sys, subprocess, configparser, json + +# --- optional TOML loader (py3.11+: tomllib; else tomli if available) --- +try: + import tomllib as toml +except Exception: + try: + import tomli as toml + except Exception: + toml = None + +EXCEPTIONS = { + # common dist→import mismatches + "scikit-learn": "sklearn", + "opencv-python": "cv2", + "pyyaml": "yaml", + "beautifulsoup4": "bs4", + "pillow": "PIL", + "mysqlclient": "MySQLdb", + "psycopg2-binary": "psycopg2", + "opencv-contrib-python": "cv2", + "protobuf": "google", # top-level package + "apache-beam": "apache_beam", +} + +# All the package names we typically query. +EXCEPTIONS.update({ + # --- core scientific stack --- + "scikit-learn": "sklearn", + "numpy": "numpy", + "pandas": "pandas", + "scipy": "scipy", + "scikit-image": "skimage", + "pywt": "pywt", + "xarray": "xarray", + "bottleneck": "bottleneck", + "h5py": "h5py", + "networkx": "networkx", + "shapely": "shapely", + + # --- ML / stats / optimization / viz --- + "optuna": "optuna", + "arviz": "arviz", + "pymc": "pymc", + "kedro": "kedro", + "modin": "modin", + "napari": "napari", + "deepchecks": "deepchecks", + "voyager": "voyager", # spotify/voyager + "warp": "warp", # NVIDIA/warp + "newton": "newton", # newton-physics/newton + + # --- domain / ecosystem libs --- + "geopandas": "geopandas", + "cartopy": "cartopy", + "iris": "iris", + "anndata": "anndata", + "scanpy": "scanpy", + "sunpy": "sunpy", + "pvlib-python": "pvlib", + "PyBaMM": "pybamm", + "momepy": "momepy", + "satpy": "satpy", + "pydicom": "pydicom", + "pynetdicom": "pynetdicom", + + # --- file formats / IO / infra --- + "asdf": "asdf", + "arrow": "pyarrow", # apache/arrow + "ArcticDB": "arcticdb", + "arctic": "arctic", + + # --- web / frameworks / utils 
--- + "django-components": "django_components", + "h11": "h11", + "tqdm": "tqdm", + "rich": "rich", + "posthog": "posthog", + "datalad": "datalad", + "ipyparallel": "ipyparallel", + + # --- numerical / symbolic / control --- + "autograd": "autograd", + "python-control": "control", + "loopy": "loopy", + "thermo": "thermo", + "chempy": "chempy", + "adaptive": "adaptive", + + # --- scientific image / signal --- + "metric-learn": "metric_learn", + + # --- quantum / physics --- + "Cirq": "cirq", + "memray": "memray", + "devito": "devito", + + # --- bio / chem / data --- + "sourmash": "sourmash", + "dipy": "dipy", + + # --- protocol buffers / codegen / outlines --- + "python-betterproto": "betterproto", + "outlines": "outlines", + + # --- DS viz / raster --- + "datashader": "datashader", + "xarray-spatial": "xarray_spatial", + + # --- misc --- + "enlighten": "enlighten", + "xorbits": "xorbits", + "geopandas": "geopandas", + "lmfit-py": "lmfit", + "mdanalysis": "MDAnalysis", + "nilearn": "nilearn", +}) + + +EXCLUDE_DIRS = { + ".git", ".hg", ".svn", ".tox", ".nox", ".venv", "venv", + "build", "dist", "__pycache__", ".mypy_cache", ".pytest_cache", + "docs", "doc", "site", "examples", "benchmarks", "tests", "testing", +} + +def _norm(s: str) -> str: + return re.sub(r"[-_.]+", "", s).lower() + +def read_pyproject(root: pathlib.Path): + cfg = {} + p = root / "pyproject.toml" + if toml and p.exists(): + try: + cfg = toml.loads(p.read_text(encoding="utf-8")) + except Exception: + pass + return cfg + +def read_setup_cfg(root: pathlib.Path): + p = root / "setup.cfg" + cp = configparser.ConfigParser() + if p.exists(): + try: + cp.read(p, encoding="utf-8") + except Exception: + pass + return cp + +def dist_name_from_config(pyproject, setup_cfg): + # PEP 621 name + name = (pyproject.get("project", {}) or {}).get("name") + if not name: + # setup.cfg [metadata] name + if setup_cfg.has_section("metadata"): + name = setup_cfg.get("metadata", "name", fallback=None) + # setup.py --name as 
last resort + return name + +def package_roots_from_config(root, pyproject, setup_cfg): + roots = set([root]) + # setuptools package-dir mapping + # pyproject: [tool.setuptools.package-dir] "" = "src" + pkgdir = ((pyproject.get("tool", {}) or {}).get("setuptools", {}) or {}).get("package-dir", {}) + if isinstance(pkgdir, dict): + if "" in pkgdir: + roots.add((root / pkgdir[""]).resolve()) + for _, d in pkgdir.items(): + try: + roots.add((root / d).resolve()) + except Exception: + pass + # setup.cfg [options] package_dir + if setup_cfg.has_section("options"): + raw = setup_cfg.get("options", "package_dir", fallback=None) + if raw: + # can be "=\nsrc" or mapping lines + lines = [l.strip() for l in raw.splitlines() if l.strip()] + # accept simple "=src" or "" = "src" + for ln in lines: + m = re.match(r'^("?\'?)*\s*=?\s*("?\'?)*\s*(?P[^#;]+)$', ln) + if m: + roots.add((root / m.group("path").strip()).resolve()) + # setup.cfg [options.packages.find] where + if setup_cfg.has_section("options.packages.find"): + where = setup_cfg.get("options.packages.find", "where", fallback=None) + if where: + for w in re.split(r"[,\s]+", where): + if w: + roots.add((root / w).resolve()) + return [r for r in roots if r.exists()] + +def explicit_modules_from_config(pyproject, setup_cfg): + mods = set() + # pyproject (tool.setuptools) py-modules / packages + st = ((pyproject.get("tool", {}) or {}).get("setuptools", {}) or {}) + for key in ("py-modules", "packages"): + val = st.get(key) + if isinstance(val, list): + mods.update(val) + # setup.cfg [options] py_modules / packages + if setup_cfg.has_section("options"): + for key in ("py_modules", "packages"): + raw = setup_cfg.get("options", key, fallback=None) + if raw: + for tok in re.split(r"[\s,]+", raw.strip()): + if tok and tok != "find:": + mods.add(tok) + return sorted(mods) + +def read_top_level_from_egg_info(root): + # editable installs often leave ./.egg-info/top_level.txt + for ei in root.rglob("*.egg-info"): + tl = ei / 
"top_level.txt" + if tl.exists(): + try: + names = [l.strip() for l in tl.read_text(encoding="utf-8").splitlines() if l.strip()] + if names: + return names + except Exception: + pass + # also consider dist-info during local builds + for di in root.rglob("*.dist-info"): + tl = di / "top_level.txt" + if tl.exists(): + try: + names = [l.strip() for l in tl.read_text(encoding="utf-8").splitlines() if l.strip()] + if names: + return names + except Exception: + pass + return None + +def walk_candidates(roots): + """Return set of plausible top-level import names under candidate roots.""" + cands = set() + for r in roots: + for path in r.rglob("__init__.py"): + try: + pkg_dir = path.parent + # skip excluded dirs anywhere in the path + if any(part in EXCLUDE_DIRS for part in pkg_dir.parts): + continue + # Construct package name relative to the nearest search root + try: + rel = pkg_dir.relative_to(r) + except Exception: + continue + if not rel.parts: + continue + top = rel.parts[0] + if top.startswith("_"): + # usually private tooling + continue + cands.add(top) + except Exception: + pass + # standalone modules at top-level of roots (py_modules case) + for mod in r.glob("*.py"): + if mod.stem not in ("setup",): + cands.add(mod.stem) + return sorted(cands) + +def score_candidates(cands, dist_name): + """Assign a score preferring names that match the dist name.""" + scores = {} + n_dist = _norm(dist_name) if dist_name else None + prefer = None + if dist_name and dist_name.lower() in EXCEPTIONS: + prefer = EXCEPTIONS[dist_name.lower()] + # also try normalized exception keys (e.g. 
capitalization) + for k, v in EXCEPTIONS.items(): + if _norm(k) == _norm(dist_name or ""): + prefer = v + + for c in cands: + s = 0 + if prefer and _norm(c) == _norm(prefer): + s += 100 + if n_dist and _norm(c) == n_dist: + s += 80 + if n_dist and (_norm(c).startswith(n_dist) or n_dist.startswith(_norm(c))): + s += 20 + # shorter, simpler names get a slight bump + s += max(0, 10 - len(c)) + scores[c] = s + return sorted(cands, key=lambda x: (-scores.get(x, 0), x)), scores + +def detect(root: str, return_all=False): + root = pathlib.Path(root).resolve() + + pyproject = read_pyproject(root) + setup_cfg = read_setup_cfg(root) + dist_name = dist_name_from_config(pyproject, setup_cfg) + + # 1) top_level.txt (best signal if present) + top = read_top_level_from_egg_info(root) + if top: + if return_all: + return top + # If multiple, score them + ordered, _ = score_candidates(top, dist_name or "") + return [ordered[0]] + + # 2) explicit declarations (py_modules / packages lists) + explicit = explicit_modules_from_config(pyproject, setup_cfg) + + # 3) find correct search roots (src layout, package_dir, etc.) 
+ roots = package_roots_from_config(root, pyproject, setup_cfg) + + # 4) walk code to infer candidates + walked = walk_candidates(roots) + + # merge explicit + walked + cands = list(dict.fromkeys(explicit + walked)) # keep order & de-dup + + # 5) fallback from dist name heuristics/exceptions if still empty + if not cands and dist_name: + # exception or simple normalization + guess = EXCEPTIONS.get(dist_name.lower()) or re.sub(r"[-\.]+", "_", dist_name) + cands = [guess] + + if not cands: + return [] + + if return_all: + # return ordered list + ordered, _ = score_candidates(cands, dist_name or "") + return ordered + else: + ordered, _ = score_candidates(cands, dist_name or "") + return [ordered[0]] + +def main(): + ap = argparse.ArgumentParser(description="Detect the top-level Python import name for a repo.") + ap.add_argument("--repo-root", default=".", help="Path to repository root") + ap.add_argument("--all", action="store_true", help="Print all plausible names (JSON list)") + args = ap.parse_args() + + names = detect(args.repo_root, return_all=args.all) + if not names: + sys.exit(1) + if args.all: + print(json.dumps(names)) + else: + print(names[0]) + +if __name__ == "__main__": + main() +PY + chmod +x /usr/local/bin/detect_import_name +} + +install_detect_import_name + +# -------- Script body -------- + +install_profile_helpers +# shellcheck disable=SC1091 +source /etc/profile.d/asv_utils.sh + +# Ensure base micromamba is active for introspecting ASV config +micromamba activate base + +install_detect_import_name +install_smokecheck + +IMPORT_NAME="$(detect_import_name || true)" +if [[ -z "$IMPORT_NAME" ]]; then + echo "WARN: Could not determine import name; the pkg stage will fall back to local detection." +fi + + +# Move into the directory that contains asv.*.json +cd_asv_json_dir || { echo "No 'asv.*.json' file found." >&2; exit 1; } + +CONF_NAME="$(asv_conf_name || true)" +if [[ -z "${CONF_NAME:-}" ]]; then + echo "No 'asv.*.json' file found." 
>&2 + exit 1 +fi + +# Make sure tomli is available in base for pyproject parsing +micromamba install -y -n base -c conda-forge tomli >/dev/null + +# Read python versions from the ASV config +PY_VERSIONS=$(python - <&2 + exit 1 +fi +###### END SETUP CODE ###### + +# ----------------------------- +# Agent guidance (read-first) +# ----------------------------- +# GOAL: For each Python version below, install the project in EDITABLE mode into env asv_{version}, +# with NO build isolation, then run health checks. +# +# Below this comment, you should do whatever is necessary to build the project without errors. Including (but not limited to): +# - Add extra conda/pip dependencies needed to build this project. +# - Run repo-specific pre-steps (e.g., submodules, generating Cython, env vars). +# - Run arbitrary micromamba/pip commands in the target env. +# - Set CFLAGS/CXXFLAGS/LDFLAGS if needed for this repo. +# - Change files in the repo if needed (e.g., fix a missing #include). +# - Anything else needed to get a successful editable install. +# +# MUST: +# - Keep this script idempotent. +# - Use: `pip install --no-build-isolation -v -e .` or `pip install -e .` or equivalent. +# - Do not modify the SETUP CODE or helper functions below. +# +# DO NOT: +# - Change env names or Python versions outside MODEL EDIT AREA. +# - Use build isolation unless absolutely necessary. 
+ +# ----------------------------- +# Helpers (do not modify) +# ----------------------------- +log() { printf "\033[1;34m[build]\033[0m %s\n" "$*"; } +warn() { printf "\033[1;33m[warn]\033[0m %s\n" "$*" >&2; } +die() { printf "\033[1;31m[fail]\033[0m %s\n" "$*" >&2; exit 1; } + +# Conservative default parallelism (override if the repo benefits) +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-2}" +export NPY_NUM_BUILD_JOBS="${NPY_NUM_BUILD_JOBS:-2}" + +# ----------------------------- +# Build & test across envs +# ----------------------------- +for version in $TARGET_VERSIONS; do + ENV_NAME="asv_${version}" + log "==> Building in env: $ENV_NAME (python=$version)" + + if ! micromamba env list | awk '{print $1}' | grep -qx "$ENV_NAME"; then + die "Env $ENV_NAME not found. Did docker_build_env.sh run?" + fi + + # Import name resolution (kept simple for the agent) + IMP="${IMPORT_NAME:-}" + if [[ -z "$IMP" ]]; then + if ! IMP="$(detect_import_name --repo-root "$REPO_ROOT" 2>/dev/null)"; then + die "Could not determine import name. Set IMPORT_NAME in /etc/profile.d/asv_build_vars.sh" + fi + fi + log "Using import name: $IMP" + + # ----------------------------- + # MODEL EDIT AREA: repo-specific tweaks (optional) + # ----------------------------- + # Examples (uncomment if needed for this repo): + # + # log "Updating submodules" + # git -C "$REPO_ROOT" submodule update --init --recursive + # + # log "Installing extra system libs via conda-forge" + # micromamba install -y -n "$ENV_NAME" -c conda-forge 'openblas' 'blas=*=openblas' 'libopenmp' + # + # log "Pre-generating Cython sources" + # micromamba run -n "$ENV_NAME" python -m cython --version + # + # export CFLAGS="${CFLAGS:-}" + # export CXXFLAGS="${CXXFLAGS:-}" + # export LDFLAGS="${LDFLAGS:-}" + # ----------------------------- + + # Install some generic packages needed for building/testing. 
+    micromamba install -y -n "$ENV_NAME" -c conda-forge pip git conda mamba libmambapy \
+        numpy scipy cython joblib threadpoolctl pytest \
+        compilers meson-python cmake ninja pkg-config tomli
+
+    # Editable install (no build isolation preferably). Toolchain lives in the env already.
+    log "Editable install with --no-build-isolation"
+    PIP_NO_BUILD_ISOLATION=1 micromamba run -n "$ENV_NAME" python -m pip install --no-build-isolation -v -e "$REPO_ROOT"
+
+    # Health checks (import + compiled extension probe; optional pytest smoke with RUN_PYTEST_SMOKE=1)
+    log "Running smoke checks"
+    micromamba run -n "$ENV_NAME" asv_smokecheck.py --import-name "$IMP" --repo-root "$REPO_ROOT" ${RUN_PYTEST_SMOKE:+--pytest-smoke}
+
+    echo "::import_name=${IMP}::env=${ENV_NAME}"
+done
+
+log "All builds complete ✅"
diff --git a/src/datasmith/docker/probe_build.sh b/src/datasmith/docker/probe_build.sh
deleted file mode 100644
index df7ecb0..0000000
--- a/src/datasmith/docker/probe_build.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env bash
-# probe build.sh is equivalent to docker_build.sh but it
-# does not install the package in the created envs.
-# Instead, it prepares the envs and copies a modified asv conf
-# to /output/$COMMIT_SHA/$PYTHON_VERSION/asv.*.json
-# which can then be used to run the benchmarks in a separate step.
-cd_asv_json_dir() {
-    local match
-    match=$(find . -type f -name "asv.*.json" | head -n 1)
-
-    if [[ -n "$match" ]]; then
-        local dir
-        dir=$(dirname "$match")
-        cd "$dir" || echo "Failed to change directory to $dir"
-    else
-        echo "No 'asv.*.json' file found in current directory or subdirectories."
-    fi
-}
-eval "$(micromamba shell hook --shell=bash)"
-micromamba activate base
-
-ROOT_PATH=${PWD}
-cd_asv_json_dir || exit 1
-CONF_NAME=$(basename "$(find . -type f -name "asv.*.json" | head -n 1)")
-if [[ -z "$CONF_NAME" ]]; then
-    echo "No 'asv.*.json' file found in current directory or subdirectories."
- exit 1 -fi -python_versions=$(python -c "import asv; pythons = asv.config.Config.load('$CONF_NAME').pythons; print(' '.join(pythons))") -for version in $python_versions; do - python -c "import asv, os, pathlib -path = pathlib.Path('/output/'\"$COMMIT_SHA\"'/''\"$version\"') -path.mkdir(parents=True, exist_ok=True) - -config = asv.config.Config.load('$CONF_NAME') -config.results_dir = str(path / 'results') -config.html_dir = str(path / 'html') - -asv.util.write_json('$CONF_NAME', config.__dict__, api_version=config.api_version) -asv.util.write_json(path / '$CONF_NAME', config.__dict__, api_version=config.api_version) -" - micromamba create -y -n "asv_${version}" -c conda-forge python="$version" git conda mamba "libmambapy<=1.9.9" numpy scipy cython joblib threadpoolctl pytest compilers - micromamba run -n "asv_${version}" pip install git+https://github.com/airspeed-velocity/asv - micromamba run -n "asv_${version}" asv machine --yes --config $CONF_NAME - micromamba run -n "asv_${version}" pip install meson-python cython -done diff --git a/src/datasmith/docker/validation.py b/src/datasmith/docker/validation.py index 6c7f4a8..37c6851 100644 --- a/src/datasmith/docker/validation.py +++ b/src/datasmith/docker/validation.py @@ -181,14 +181,13 @@ def validate_one( # noqa: C901 Returns a structured dict for JSONL summarization. 
""" assert task.sha is not None, "Task.sha must be set" # noqa: S101 - image_name = f"asv/{task.owner}/{task.repo}/{task.sha}".lower() - docker_ctx = context_registry[image_name] + docker_ctx = context_registry[task.get_image_name()] - build_cmd, run_cmd = format_cmds(image_name, task.owner, task.repo, task.sha, args.output_dir) + build_cmd, run_cmd = format_cmds(task.get_image_name(), task.owner, task.repo, task.sha, args.output_dir) build_res: BuildResult = docker_ctx.build_container_streaming( client=client, - image_name=image_name, + image_name=task.get_image_name(), build_args={ "REPO_URL": f"https://www.github.com/{task.owner}/{task.repo}", "COMMIT_SHA": task.sha, @@ -206,7 +205,7 @@ def validate_one( # noqa: C901 build_stage = "build-ok" if not build_res.ok: - return _handle_build_error(task, build_cmd, run_cmd, build_res, args, image_name, build_stage) + return _handle_build_error(task, build_cmd, run_cmd, build_res, args, task.get_image_name(), build_stage) # --- RUN --- # prepare env (clone default Machine args and set machine=sha) @@ -221,9 +220,9 @@ def validate_one( # noqa: C901 files = {} try: container = client.containers.run( - image=image_name, + image=task.get_image_name(), detach=True, - name=f"{image_name.replace('/', '-')}-validation", + name=task.get_container_name(), environment=env, volumes={str((args.output_dir / "results").absolute()): {"bind": "/output", "mode": "rw"}}, ) @@ -244,7 +243,7 @@ def validate_one( # noqa: C901 try: files = log_container_output(container, archive="/output") except Exception: - logger.exception("Failed to archive output for %s", image_name) + logger.exception("Failed to archive output for %s", task.get_image_name()) ok = rc == 0 @@ -259,14 +258,14 @@ def validate_one( # noqa: C901 if not ok: return _handle_run_error( - task, build_cmd, run_cmd, rc, logs_tail, args, image_name, run_stage, build_stage, files + task, build_cmd, run_cmd, rc, logs_tail, args, task.get_image_name(), run_stage, build_stage, files ) - 
return {  # noqa: TRY300
+        return {
             "owner": task.owner,
             "repo": task.repo,
             "sha": task.sha,
-            "image_name": image_name,
+            "image_name": task.get_image_name(),
             "stage": f"{run_stage}+{build_stage}",
             "ok": ok,
             "rc": rc,
@@ -278,11 +277,11 @@ def validate_one(  # noqa: C901
             "files": files,
         }
     except Exception:
-        return _handle_run_exception(task, build_cmd, run_cmd, args, image_name, build_stage)
+        return _handle_run_exception(task, build_cmd, run_cmd, args, task.get_image_name(), build_stage)
     finally:
         # best-effort cleanup
         try:
             if container:
                 container.remove(force=True)
         except Exception:
-            logger.exception("Failed to remove container for %s", image_name)
+            logger.exception("Failed to remove container for %s", task.get_image_name())
diff --git a/src/datasmith/execution/collect_commits_offline.py b/src/datasmith/execution/collect_commits_offline.py
index b04b987..1602609 100644
--- a/src/datasmith/execution/collect_commits_offline.py
+++ b/src/datasmith/execution/collect_commits_offline.py
@@ -8,12 +8,14 @@
 import urllib.parse
 from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait
 from pathlib import Path
+from typing import Callable

 from git import GitCommandError, Repo
 from tqdm.auto import tqdm

 from datasmith import logger
 from datasmith.agents.perf_judge import PerfClassifier
+from datasmith.execution.utils import get_change_summary
 from datasmith.utils import CACHE_LOCATION, cache_completion

 _PR_MERGE_PATTERNS: tuple[re.Pattern[str], ...] = (
@@ -21,6 +23,10 @@
     re.compile(r"Merge pull request #(\d+)\b"),
     # squash-merge style "... (#[0-9]+)" on the last line
     re.compile(r"\(#(\d+)\)"),
+    # Refers to an issue/PR number. GH-{number}
+    re.compile(r"\bGH-(\d+)\b"),
+    # Has a hashtag followed by a number.
#123 + re.compile(r"#(\d+)\b"), ) @@ -44,7 +50,133 @@ def _is_pr_merge(message: str) -> bool: return any(p.search(message) for p in _PR_MERGE_PATTERNS) -def find_parent_commits(repo_name: str, commits: list[str]) -> list[str]: +def find_tagged_commits(repo: Repo) -> list[str]: + merge_shas: set[str] = set() + for tag in repo.tags: + if tag.commit.hexsha not in merge_shas: + merge_shas.add(tag.commit.hexsha) + + logger.debug(f"Collected {len(merge_shas)} commits from {repo.working_dir}.") + return sorted(merge_shas) + + +def find_parent_commits(repo: Repo, commits: list[str], add_first: bool = False) -> list[str]: + parent_commits = set() + for commit_sha in commits: + try: + commit = repo.commit(commit_sha) + # Add parent commits if they exist + parents = commit.parents + if add_first and len(parents): + # only keep the first parent. + parents = [parents[0]] + + for parent in parents: + parent_commits.add(parent.hexsha) + except Exception as e: + logger.warning(f"Could not find commit {commit_sha} in {repo.working_dir}: {e}") + + logger.debug(f"Collected {len(parent_commits)} parent commits from {repo.working_dir}.") + return sorted(parent_commits) + + +def collect_commits(repo: Repo) -> list[str]: + """ + Collect all commit SHAs from the given bare repository. 
+ """ + branch = _default_branch(repo) + ref_to_walk = f"origin/{branch}" + commits = [c.hexsha for c in repo.iter_commits(ref_to_walk)] + tagged_commits = find_tagged_commits(repo) + parent_commits = find_parent_commits(repo, commits + tagged_commits, add_first=True) + + return sorted(set(commits + tagged_commits + parent_commits)) + + +def _parallel_classify( + commits: list[tuple[str, str | bytes, str]], + process_commit_tuple: Callable[[tuple[str, str | bytes, str]], str | None], + repo_name: str, + n_workers: int, +) -> set[str]: + merge_shas: set[str] = set() + max_workers = n_workers + window = max_workers * 4 + + with ThreadPoolExecutor(max_workers=max_workers) as ex: + pbar = tqdm( + total=len(commits), + desc=f"Walking {repo_name} commits", + unit="commit", + file=sys.stdout, + miniters=1, + mininterval=0.1, + ) + + it = iter(commits) + pending = set() + + for _ in range(min(window, len(commits))): + pending.add(ex.submit(process_commit_tuple, next(it))) + + while pending: + done, pending = wait(pending, return_when=FIRST_COMPLETED) + + for fut in done: + try: + sha = fut.result() + if sha: + merge_shas.add(sha) + except Exception: + logger.exception("Worker failed") + finally: + pbar.update(1) + + with contextlib.suppress(StopIteration): + pending.add(ex.submit(process_commit_tuple, next(it))) + + pbar.close() + + logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") + return merge_shas + + +def batch_classify_commits( + perf_classifier: PerfClassifier, repo_name: str, commits: list[tuple[str, str | bytes, str]], n_workers: int +) -> set[str]: + def process_commit_tuple(t: tuple[str, str | bytes, str]) -> str | None: + hexsha, message, changes_summary = t + full_msg = message.strip() + + if not _is_pr_merge(str(full_msg)): + logger.debug(f"Skipping commit {hexsha}:{full_msg!s} as it is not a PR merge.") + return None + + full_msg = re.sub(r"\nSigned-off-by:.*", "", str(full_msg)).replace("\n\n", "\n").strip() + if len(full_msg.split()) > 
2048: + full_msg = " ".join(full_msg.split()[:2048]) + "..." + + is_perf, agent_trace = perf_classifier.get_response(message=str(full_msg), file_change_summary=changes_summary) + if not is_perf: + logger.debug(f"Skipping commit {hexsha} as it is not a performance commit.") + logger.debug(f"Agent trace: {agent_trace}") + return None + + return hexsha + + if n_workers < 0: + merge_shas: set[str] = set() + for t in tqdm(commits, desc=f"Walking {repo_name} commits", unit="commit", file=sys.stdout): + sha = process_commit_tuple(t) + if sha: + merge_shas.add(sha) + logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") + return merge_shas + else: + return _parallel_classify(commits, process_commit_tuple, repo_name, n_workers) + + +def find_parent_releases(repo_name: str, commits: list[str]) -> list[str]: """ Return a list of commit SHAs that are parent commits of the given commits, **without** calling any GitHub API endpoints. @@ -74,18 +206,7 @@ def find_parent_commits(repo_name: str, commits: list[str]) -> list[str]: return [] raise - parent_commits = set() - for commit_sha in commits: - try: - commit = repo.commit(commit_sha) - # Add parent commits if they exist - for parent in commit.parents: - parent_commits.add(parent.hexsha) - except Exception as e: - logger.warning(f"Could not find commit {commit_sha} in {repo_name}: {e}") - - logger.info(f"Collected {len(parent_commits)} parent commits from {repo_name}.") - return sorted(parent_commits) + return find_parent_commits(repo, commits) def find_tagged_releases(repo_name: str) -> list[str]: @@ -118,18 +239,11 @@ def find_tagged_releases(repo_name: str) -> list[str]: return [] raise - merge_shas: set[str] = set() - for tag in repo.tags: - if tag.commit.hexsha not in merge_shas: - merge_shas.add(tag.commit.hexsha) - - logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") - - return sorted(merge_shas) + return find_tagged_commits(repo) @cache_completion(CACHE_LOCATION, "find_perf_commits") 
-def find_perf_commits( # noqa: C901 +def find_perf_commits( repo_name: str, query: str, max_pages: int = 100, # ignored (kept for compatibility) @@ -184,69 +298,6 @@ def find_perf_commits( # noqa: C901 branch = base_branch or _default_branch(repo) ref_to_walk = f"origin/{branch}" - commits = [(c.hexsha, c.message) for c in repo.iter_commits(ref_to_walk)] - - def process_commit_tuple(t: tuple[str, str | bytes]) -> str | None: - hexsha, message = t - full_msg = message.strip() - - if not _is_pr_merge(str(full_msg)): - logger.debug(f"Skipping commit {hexsha} as it is not a PR merge.") - return None - - full_msg = re.sub(r"\nSigned-off-by:.*", "", str(full_msg)).replace("\n\n", "\n").strip() - if len(full_msg.split()) > 2048: - full_msg = " ".join(full_msg.split()[:2048]) + "..." - - is_perf, agent_trace = perf_classifier.get_response(message=str(full_msg)) - if not is_perf: - logger.debug(f"Skipping commit {hexsha} as it is not a performance commit.") - logger.debug(f"Agent trace: {agent_trace}") - return None - - return hexsha - - merge_shas: set[str] = set() - max_workers = n_workers - # keep a small multiple of workers in-flight; adjust if you want more buffering - window = max_workers * 4 - - with ThreadPoolExecutor(max_workers=max_workers) as ex: - pbar = tqdm( - total=len(commits), - desc=f"Walking {repo_name} commits", - unit="commit", - file=sys.stdout, - miniters=1, - mininterval=0.1, - ) - - it = iter(commits) - pending = set() - - # prime the window - for _ in range(min(window, len(commits))): - pending.add(ex.submit(process_commit_tuple, next(it))) - - while pending: - done, pending = wait(pending, return_when=FIRST_COMPLETED) - - for fut in done: - try: - sha = fut.result() - if sha: - merge_shas.add(sha) - except Exception: - # don't let one bad task kill the progress loop - logger.exception("Worker failed") - finally: - pbar.update(1) - - # backfill one task for each completed, keeping the window steady - with contextlib.suppress(StopIteration): - 
pending.add(ex.submit(process_commit_tuple, next(it))) - - pbar.close() - - logger.info(f"Collected {len(merge_shas)} commits from {repo_name}.") + commits = [(c.hexsha, c.message, get_change_summary(c)) for c in repo.iter_commits(ref_to_walk)] + merge_shas = batch_classify_commits(perf_classifier, repo_name, commits, n_workers) return sorted(merge_shas) diff --git a/src/datasmith/execution/utils.py b/src/datasmith/execution/utils.py index 8a37ddc..ca58410 100644 --- a/src/datasmith/execution/utils.py +++ b/src/datasmith/execution/utils.py @@ -152,6 +152,24 @@ def has_asv(repo: Repo, c: Commit) -> bool: return any(obj.type == "blob" and obj.name == "asv.conf.json" for obj in c.tree.traverse()) # type: ignore[union-attr] +def get_change_summary(commit: Commit) -> str: + """ + Generate a summary of changes made in the commit. + This should be a fast operation. + The summary should be a markdown table of the files changed, lines added, lines removed, and total changes. + """ + stats = commit.stats + summary_lines = [ + "| File | Lines Added | Lines Removed | Total Changes |", + "|------|-------------|----------------|----------------|", + ] + for file_path, file_stats in stats.files.items(): + summary_lines.append( + f"| {file_path} | {file_stats['insertions']} | {file_stats['deletions']} | {file_stats['lines']} |" + ) + return "\n".join(summary_lines) + + @cache_completion(CACHE_LOCATION, "get_commit_info_offline") def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> dict[str, Any]: """ @@ -172,6 +190,7 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> "files_changed": "", "patch": "", "has_asv": False, + "file_change_summary": "", } try: commit = repo.commit(commit_sha) @@ -202,6 +221,7 @@ def _get_commit_info_offline(repo: Repo, commit_sha: str, bypass_cache=True) -> "files_changed": "\n".join(str(k) for k in stats.files), "patch": patch, "has_asv": has_asv(repo, commit), + "file_change_summary": 
get_change_summary(commit), }